/* Tree based points-to analysis
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "diagnostic.h"
#include "tree.h"
#include "c-common.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "varray.h"
#include "c-tree.h"
#include "tree-gimple.h"
#include "hashtab.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "tree-ssa-structalias.h"
#include "params.h"

/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce, Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of constraint expressions, DEREF, ADDRESSOF, and
   SCALAR.  Each constraint expression consists of a constraint type,
   a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL


  In order to solve the system of set constraints, the following is
  done:

  1. Each constraint variable x has a solution set associated with it,
  Sol(x).

  2. Constraints are separated into direct, copy, and complex.
  Direct constraints are ADDRESSOF constraints that require no extra
  processing, such as P = &Q.
  Copy constraints are those of the form P = Q.
  Complex constraints are all the constraints involving dereferences.

  3. All direct constraints of the form P = &Q are processed, such
  that Q is added to Sol(P).

  4. All complex constraints for a given constraint variable are stored in a
  linked list attached to that variable's node.

  5. A directed graph is built out of the copy constraints. Each
  constraint variable is a node in the graph, and an edge from
  Q to P is added for each copy constraint of the form P = Q.

  6. The graph is then walked, and solution sets are
  propagated along the copy edges, such that an edge from Q to P
  causes Sol(P) <- Sol(P) union Sol(Q).

  7. As we visit each node, all complex constraints associated with
  that node are processed by adding appropriate copy edges to the graph, or the
  appropriate variables to the solution set.

  8. The process of walking the graph is iterated until no solution
  sets change.

  Prior to walking the graph in steps 6 and 7, we perform static
  cycle elimination on the constraint graph, as well
  as off-line variable substitution.  A worked example of the whole
  process is given after this comment.

  TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
  on and turned into anything), but isn't.  You can just see what offset
  inside the pointed-to struct it's going to access.

  TODO: Constant bounded arrays can be handled as if they were structs of the
  same number of elements.

  TODO: Modeling heap and incoming pointers becomes much better if we
  add fields to them as we discover them, which we could do.

  TODO: We could handle unions, but to be honest, it's probably not
  worth the pain or slowdown.  */
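
/* For illustration (this example is made up, not taken from any
   particular testcase), consider the fragment

     int a, b;
     int *p, *q, **pp;
     p = &a;
     pp = &p;
     q = *pp;
     *pp = &b;

   Constraint generation as described above would roughly produce

     p = &a        direct:   a is added to Sol(p)
     pp = &p       direct:   p is added to Sol(pp)
     q = *pp       complex:  attached to pp's node
     *pp = &b      complex:  attached to pp's node

   When the solver visits pp, the complex constraints are processed
   against Sol(pp) = {p}: the first adds a copy edge from p to q, so
   Sol(q) receives Sol(p); the second adds b to Sol(p).  Iteration
   stops when no solution set changes, leaving Sol(p) = {a, b},
   Sol(pp) = {p} and Sol(q) = {a, b}.  */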

static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
  htab_t heapvar_for_stmt;
static bool use_field_sensitive = true;
static unsigned int create_variable_info_for (tree, const char *);
static struct constraint_expr get_constraint_for (tree, bool *);
static void build_constraint_graph (void);

static bitmap_obstack ptabitmap_obstack;
static bitmap_obstack iteration_obstack;
DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int collapsed_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
} stats;

struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* Node in the graph that represents the constraints and points-to
     solution for the variable.  */
  unsigned int node;

  /* True if the address of this variable is taken.  Needed for
     variable substitution.  */
  unsigned int address_taken:1;

  /* True if this variable is the target of a dereference.  Needed for
     variable substitution.  */
  unsigned int indirect_target:1;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var:1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var:1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var:1;

  /* True for variables that have unions somewhere in them.  */
  unsigned int has_union:1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var:1;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Variable ids represented by this node.  */
  bitmap variables;

  /* Vector of complex constraints for this node.  Complex
     constraints are those involving dereferences.  */
  VEC(constraint_t,heap) *complex;

  /* Variable id this was collapsed to due to type unsafety.
     This should be unused completely after build_constraint_graph, or
     something is broken.  */
  struct variable_info *collapsed_to;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

DEF_VEC_P(varinfo_t);

DEF_VEC_ALLOC_P(varinfo_t, heap);

/* Table of variable info structures for constraint variables.  Indexed directly
   by variable info id.  */
static VEC(varinfo_t,heap) *varmap;

/* Return the varmap element N */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index(varinfo_t, varmap, n);
}

/* Return the varmap element N, following the collapsed_to link.  */

static inline varinfo_t
get_varinfo_fc (unsigned int n)
{
  varinfo_t v = VEC_index(varinfo_t, varmap, n);

  if (v->collapsed_to)
    return v->collapsed_to;
  return v;
}

/* Variable that represents the unknown pointer.  */
static varinfo_t var_anything;
static tree anything_tree;
static unsigned int anything_id;

/* Variable that represents the NULL pointer.  */
static varinfo_t var_nothing;
static tree nothing_tree;
static unsigned int nothing_id;

/* Variable that represents read only memory.  */
static varinfo_t var_readonly;
static tree readonly_tree;
static unsigned int readonly_id;

/* Variable that represents integers.  This is used when people do things
   like &0->a.b.  */
static varinfo_t var_integer;
static tree integer_tree;
static unsigned int integer_id;

/* Variable that represents arbitrary offsets into an object.  Used to
   represent pointer arithmetic, which may not legally escape the
   bounds of an object.  */
static varinfo_t var_anyoffset;
static tree anyoffset_tree;
static unsigned int anyoffset_id;


/* Lookup a heap var for FROM, and return it if we find one.  */

static tree
heapvar_lookup (tree from)
{
  struct tree_map *h, in;
  in.from = from;

  h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
  if (h)
    return h->to;
  return NULL_TREE;
}

/* Insert a mapping FROM->TO in the heap var for statement
   hashtable.  */

static void
heapvar_insert (tree from, tree to)
{
  struct tree_map *h;
  void **loc;

  h = ggc_alloc (sizeof (struct tree_map));
  h->hash = htab_hash_pointer (from);
  h->from = from;
  h->to = to;
  loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
  *(struct tree_map **) loc = h;
}

/* Return a new variable info structure for a variable named NAME,
   associated with tree T and using constraint graph node NODE.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name, unsigned int node)
{
  varinfo_t ret = pool_alloc (variable_info_pool);

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->node = node;
  ret->address_taken = false;
  ret->indirect_target = false;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->has_union = false;
  ret->solution = BITMAP_ALLOC (&ptabitmap_obstack);
  bitmap_clear (ret->solution);
  ret->variables = BITMAP_ALLOC (&ptabitmap_obstack);
  bitmap_clear (ret->variables);
  ret->complex = NULL;
  ret->next = NULL;
  ret->collapsed_to = NULL;
  return ret;
}

typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.   */
  unsigned HOST_WIDE_INT offset;
};
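
/* As an illustration (hypothetical variable names, not from an actual
   dump), the statement "x = *y" would be represented by the pair of
   constraint expressions

     lhs = { type = SCALAR,    var = id of x, offset = 0 }
     rhs = { type = DEREF,     var = id of y, offset = 0 }

   and "x = &y" by

     lhs = { type = SCALAR,    var = id of x, offset = 0 }
     rhs = { type = ADDRESSOF, var = id of y, offset = 0 }

   A nonzero offset in a DEREF expression means "dereference, then add
   OFFSET to each member of the resulting set", as described above.  */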

static struct constraint_expr do_deref (struct constraint_expr);

/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static VEC(constraint_t,heap) *constraints;
static alloc_pool constraint_pool;

/* An edge in the constraint graph.  We technically have no use for
   the src, since it will always be the same node that we are indexing
   into the pred/succ arrays with, but it's nice for checking
   purposes.  The edges are weighted, with a bit set in weights for
   each edge from src to dest with that weight.  */

struct constraint_edge
{
  unsigned int src;
  unsigned int dest;
  bitmap weights;
};

typedef struct constraint_edge *constraint_edge_t;
static alloc_pool constraint_edge_pool;

/* Return a new constraint edge from SRC to DEST.  */

static constraint_edge_t
new_constraint_edge (unsigned int src, unsigned int dest)
{
  constraint_edge_t ret = pool_alloc (constraint_edge_pool);
  ret->src = src;
  ret->dest = dest;
  ret->weights = NULL;
  return ret;
}

DEF_VEC_P(constraint_edge_t);
DEF_VEC_ALLOC_P(constraint_edge_t,heap);


/* The constraint graph is simply a set of adjacency vectors, one per
   variable. succs[x] is the vector of successors for variable x, and preds[x]
   is the vector of predecessors for variable x.
   IOW, all edges are "forward" edges, which is not like our CFG.
   So remember that
   preds[x]->src == x, and
   succs[x]->src == x.  */

struct constraint_graph
{
  VEC(constraint_edge_t,heap) **succs;
  VEC(constraint_edge_t,heap) **preds;
};

typedef struct constraint_graph *constraint_graph_t;

static constraint_graph_t graph;

/* Create a new constraint consisting of LHS and RHS expressions.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}

/* Print out constraint C to FILE.  */

void
dump_constraint (FILE *file, constraint_t c)
{
  if (c->lhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->lhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
  if (c->lhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
  fprintf (file, " = ");
  if (c->rhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->rhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
  if (c->rhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
  fprintf (file, "\n");
}

/* Print out constraint C to stderr.  */

void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}

/* Print out all constraints to FILE */

void
dump_constraints (FILE *file)
{
  int i;
  constraint_t c;
  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    dump_constraint (file, c);
}

/* Print out all constraints to stderr.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}

/* SOLVER FUNCTIONS

   The solver is a simple worklist solver, that works on the following
   algorithm:

   sbitmap changed_nodes = all ones;
   changed_count = number of nodes;
   For each node that was already collapsed:
       changed_count--;


   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   }  */

/* Return true if two constraint expressions A and B are equal.  */

static bool
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
{
  return a.type == b.type
    && a.var == b.var
    && a.offset == b.offset;
}

/* Return true if constraint expression A is less than constraint expression
   B.  This is just arbitrary, but consistent, in order to give them an
   ordering.  */

static bool
constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
{
  if (a.type == b.type)
    {
      if (a.var == b.var)
	return a.offset < b.offset;
      else
	return a.var < b.var;
    }
  else
    return a.type < b.type;
}

/* Return true if constraint A is less than constraint B.  This is just
   arbitrary, but consistent, in order to give them an ordering.  */

static bool
constraint_less (const constraint_t a, const constraint_t b)
{
  if (constraint_expr_less (a->lhs, b->lhs))
    return true;
  else if (constraint_expr_less (b->lhs, a->lhs))
    return false;
  else
    return constraint_expr_less (a->rhs, b->rhs);
}

/* Return true if two constraints A and B are equal.  */

static bool
constraint_equal (struct constraint a, struct constraint b)
{
  return constraint_expr_equal (a.lhs, b.lhs)
    && constraint_expr_equal (a.rhs, b.rhs);
}


/* Find a constraint LOOKFOR in the sorted constraint vector VEC */

static constraint_t
constraint_vec_find (VEC(constraint_t,heap) *vec,
		     struct constraint lookfor)
{
  unsigned int place;
  constraint_t found;

  if (vec == NULL)
    return NULL;

  place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
  if (place >= VEC_length (constraint_t, vec))
    return NULL;
  found = VEC_index (constraint_t, vec, place);
  if (!constraint_equal (*found, lookfor))
    return NULL;
  return found;
}

/* Union two constraint vectors, TO and FROM.  Put the result in TO.  */

static void
constraint_set_union (VEC(constraint_t,heap) **to,
		      VEC(constraint_t,heap) **from)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
    {
      if (constraint_vec_find (*to, *c) == NULL)
	{
	  unsigned int place = VEC_lower_bound (constraint_t, *to, c,
						constraint_less);
	  VEC_safe_insert (constraint_t, heap, *to, place, c);
	}
    }
}

/* Take a solution set SET, add OFFSET to each member of the set, and
   overwrite SET with the result when done.  */
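
/* For example, using the "struct f ... foo" layout from the overview
   comment at the top of this file: if SET is { foo.a } and OFFSET is
   32, the result is { foo.b }, since foo.a's offset (0) plus 32 still
   falls inside foo's fullsize of 64 and names the field at offset 32.
   Members that are artificial, contain unions, or have unknown size
   are kept as-is, and members for which the offset would run past the
   end of the variable are dropped.  */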

static void
solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
{
  bitmap result = BITMAP_ALLOC (&iteration_obstack);
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
    {
      /* If this is a properly sized variable, only add offset if it's
	 less than end.  Otherwise, it is globbed to a single
	 variable.  */

      if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
	{
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
	  varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
	  if (!v)
	    continue;
	  bitmap_set_bit (result, v->id);
	}
      else if (get_varinfo (i)->is_artificial_var
	       || get_varinfo (i)->has_union
	       || get_varinfo (i)->is_unknown_size_var)
	{
	  bitmap_set_bit (result, i);
	}
    }

  bitmap_copy (set, result);
  BITMAP_FREE (result);
}

/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  */

static bool
set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
{
  if (inc == 0)
    return bitmap_ior_into (to, from);
  else
    {
      bitmap tmp;
      bool res;

      tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_add (tmp, inc);
      res = bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return res;
    }
}

/* Insert constraint C into the list of complex constraints for VAR.  */

static void
insert_into_complex (unsigned int var, constraint_t c)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int place = VEC_lower_bound (constraint_t, vi->complex, c,
					constraint_less);
  VEC_safe_insert (constraint_t, heap, vi->complex, place, c);
}


/* Compare two constraint edges A and B, return true if they are equal.  */

static bool
constraint_edge_equal (struct constraint_edge a, struct constraint_edge b)
{
  return a.src == b.src && a.dest == b.dest;
}

/* Compare two constraint edges, return true if A is less than B */

static bool
constraint_edge_less (const constraint_edge_t a, const constraint_edge_t b)
{
  if (a->dest < b->dest)
    return true;
  else if (a->dest == b->dest)
    return a->src < b->src;
  else
    return false;
}

/* Find the constraint edge that matches LOOKFOR, in VEC.
   Return the edge, if found, NULL otherwise.  */

static constraint_edge_t
constraint_edge_vec_find (VEC(constraint_edge_t,heap) *vec,
			  struct constraint_edge lookfor)
{
  unsigned int place;
  constraint_edge_t edge;

  place = VEC_lower_bound (constraint_edge_t, vec, &lookfor,
			   constraint_edge_less);
  edge = VEC_index (constraint_edge_t, vec, place);
  if (!constraint_edge_equal (*edge, lookfor))
    return NULL;
  return edge;
}
/* Condense two variable nodes into a single variable node, by moving
   all associated info from SRC to TO.  */

static void
condense_varmap_nodes (unsigned int to, unsigned int src)
{
  varinfo_t tovi = get_varinfo (to);
  varinfo_t srcvi = get_varinfo (src);
  unsigned int i;
  constraint_t c;
  bitmap_iterator bi;

  /* The src node, and all its variables, are now the to node.  */
  srcvi->node = to;
  EXECUTE_IF_SET_IN_BITMAP (srcvi->variables, 0, i, bi)
    get_varinfo (i)->node = to;

  /* Merge the src node variables and the to node variables.  */
  bitmap_set_bit (tovi->variables, src);
  bitmap_ior_into (tovi->variables, srcvi->variables);
  bitmap_clear (srcvi->variables);

  /* Move all complex constraints from src node into to node  */
  for (i = 0; VEC_iterate (constraint_t, srcvi->complex, i, c); i++)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, or *src = a.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else
	c->lhs.var = to;
    }
  constraint_set_union (&tovi->complex, &srcvi->complex);
  VEC_free (constraint_t, heap, srcvi->complex);
  srcvi->complex = NULL;
}

/* Erase EDGE from GRAPH.  This routine only handles self-edges
   (i.e. an edge from a to a).  */

static void
erase_graph_self_edge (constraint_graph_t graph, struct constraint_edge edge)
{
  VEC(constraint_edge_t,heap) *predvec = graph->preds[edge.src];
  VEC(constraint_edge_t,heap) *succvec = graph->succs[edge.dest];
  unsigned int place;
  gcc_assert (edge.src == edge.dest);

  /* Remove from the successors.  */
  place = VEC_lower_bound (constraint_edge_t, succvec, &edge,
			   constraint_edge_less);

  /* Make sure we found the edge.  */
#ifdef ENABLE_CHECKING
  {
    constraint_edge_t tmp = VEC_index (constraint_edge_t, succvec, place);
    gcc_assert (constraint_edge_equal (*tmp, edge));
  }
#endif
  VEC_ordered_remove (constraint_edge_t, succvec, place);

  /* Remove from the predecessors.  */
  place = VEC_lower_bound (constraint_edge_t, predvec, &edge,
			   constraint_edge_less);

  /* Make sure we found the edge.  */
#ifdef ENABLE_CHECKING
  {
    constraint_edge_t tmp = VEC_index (constraint_edge_t, predvec, place);
    gcc_assert (constraint_edge_equal (*tmp, edge));
  }
#endif
  VEC_ordered_remove (constraint_edge_t, predvec, place);
}

/* Remove edges involving NODE from GRAPH.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  VEC(constraint_edge_t,heap) *succvec = graph->succs[node];
  VEC(constraint_edge_t,heap) *predvec = graph->preds[node];
  constraint_edge_t c;
  int i;

  /* Walk the successors, erase the associated preds.  */
  for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
    if (c->dest != node)
      {
	unsigned int place;
	struct constraint_edge lookfor;
	lookfor.src = c->dest;
	lookfor.dest = node;
	place = VEC_lower_bound (constraint_edge_t, graph->preds[c->dest],
				 &lookfor, constraint_edge_less);
	VEC_ordered_remove (constraint_edge_t, graph->preds[c->dest], place);
      }
  /* Walk the preds, erase the associated succs.  */
  for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
    if (c->dest != node)
      {
	unsigned int place;
	struct constraint_edge lookfor;
	lookfor.src = c->dest;
	lookfor.dest = node;
	place = VEC_lower_bound (constraint_edge_t, graph->succs[c->dest],
				 &lookfor, constraint_edge_less);
	VEC_ordered_remove (constraint_edge_t, graph->succs[c->dest], place);
      }

  VEC_free (constraint_edge_t, heap, graph->preds[node]);
  VEC_free (constraint_edge_t, heap, graph->succs[node]);
  graph->preds[node] = NULL;
  graph->succs[node] = NULL;
}

static bool edge_added = false;

/* Add edge NEWE to GRAPH.  Return true if the edge was added, false if
   it already existed.  */

static bool
add_graph_edge (constraint_graph_t graph, struct constraint_edge newe)
{
  unsigned int place;
  unsigned int src = newe.src;
  unsigned int dest = newe.dest;
  VEC(constraint_edge_t,heap) *vec;

  vec = graph->preds[src];
  place = VEC_lower_bound (constraint_edge_t, vec, &newe,
			   constraint_edge_less);
  if (place == VEC_length (constraint_edge_t, vec)
      || VEC_index (constraint_edge_t, vec, place)->dest != dest)
    {
      constraint_edge_t edge = new_constraint_edge (src, dest);
      bitmap weightbitmap;

      weightbitmap = BITMAP_ALLOC (&ptabitmap_obstack);
      edge->weights = weightbitmap;
      VEC_safe_insert (constraint_edge_t, heap, graph->preds[edge->src],
		       place, edge);
      edge = new_constraint_edge (dest, src);
      edge->weights = weightbitmap;
      place = VEC_lower_bound (constraint_edge_t, graph->succs[edge->src],
			       edge, constraint_edge_less);
      VEC_safe_insert (constraint_edge_t, heap, graph->succs[edge->src],
		       place, edge);
      edge_added = true;
      return true;
    }
  else
    return false;
}


/* Return the bitmap representing the weights of edge LOOKFOR */

static bitmap
get_graph_weights (constraint_graph_t graph, struct constraint_edge lookfor)
{
  constraint_edge_t edge;
  unsigned int src = lookfor.src;
  VEC(constraint_edge_t,heap) *vec;
  vec = graph->preds[src];
  edge = constraint_edge_vec_find (vec, lookfor);
  gcc_assert (edge != NULL);
  return edge->weights;
}


/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  VEC(constraint_edge_t,heap) *succvec = graph->succs[from];
  VEC(constraint_edge_t,heap) *predvec = graph->preds[from];
  int i;
  constraint_edge_t c;

  /* Merge all the predecessor edges.  */

  for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
    {
      unsigned int d = c->dest;
      struct constraint_edge olde;
      struct constraint_edge newe;
      bitmap temp;
      bitmap weights;
      if (c->dest == from)
	d = to;
      newe.src = to;
      newe.dest = d;
      add_graph_edge (graph, newe);
      olde.src = from;
      olde.dest = c->dest;
      olde.weights = NULL;
      temp = get_graph_weights (graph, olde);
      weights = get_graph_weights (graph, newe);
      bitmap_ior_into (weights, temp);
    }

  /* Merge all the successor edges.  */
  for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
    {
      unsigned int d = c->dest;
      struct constraint_edge olde;
      struct constraint_edge newe;
      bitmap temp;
      bitmap weights;
      if (c->dest == from)
	d = to;
      newe.src = d;
      newe.dest = to;
      add_graph_edge (graph, newe);
      olde.src = c->dest;
      olde.dest = from;
      olde.weights = NULL;
      temp = get_graph_weights (graph, olde);
      weights = get_graph_weights (graph, newe);
      bitmap_ior_into (weights, temp);
    }
  clear_edges_for_node (graph, from);
}

/* Add a graph edge to GRAPH, going from TO to FROM, with WEIGHT, if
   it doesn't exist in the graph already.
   Return true if the edge or the weight was newly added, false otherwise.  */

static bool
int_add_graph_edge (constraint_graph_t graph, unsigned int to,
		    unsigned int from, unsigned HOST_WIDE_INT weight)
{
  if (to == from && weight == 0)
    {
      return false;
    }
  else
    {
      bool r;
      struct constraint_edge edge;
      edge.src = to;
      edge.dest = from;
      edge.weights = NULL;
      r = add_graph_edge (graph, edge);
      r |= !bitmap_bit_p (get_graph_weights (graph, edge), weight);
      bitmap_set_bit (get_graph_weights (graph, edge), weight);
      return r;
    }
}


/* Return true if LOOKFOR is an existing graph edge.  */

static bool
valid_graph_edge (constraint_graph_t graph, struct constraint_edge lookfor)
{
  return constraint_edge_vec_find (graph->preds[lookfor.src], lookfor) != NULL;
}


/* Build the constraint graph.  */

static void
build_constraint_graph (void)
{
  int i = 0;
  constraint_t c;

  graph = xmalloc (sizeof (struct constraint_graph));
  graph->succs = xcalloc (VEC_length (varinfo_t, varmap),
			  sizeof (*graph->succs));
  graph->preds = xcalloc (VEC_length (varinfo_t, varmap),
			  sizeof (*graph->preds));

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
      unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;

      if (lhs.type == DEREF)
	{
	  /* *x = y or *x = &y (complex) */
	  if (rhs.type == ADDRESSOF || rhsvar > anything_id)
	    insert_into_complex (lhsvar, c);
	}
      else if (rhs.type == DEREF)
	{
	  /* !special var = *y */
	  if (!(get_varinfo (lhsvar)->is_special_var))
	    insert_into_complex (rhsvar, c);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id)
	{
	  /* Ignore 0 weighted self edges, as they can't possibly contribute
	     anything */
	  if (lhsvar != rhsvar || rhs.offset != 0 || lhs.offset != 0)
	    {

	      struct constraint_edge edge;
	      edge.src = lhsvar;
	      edge.dest = rhsvar;
	      /* x = y (simple) */
	      add_graph_edge (graph, edge);
	      bitmap_set_bit (get_graph_weights (graph, edge),
			      rhs.offset);
	    }

	}
    }
}
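
/* To summarize the loop above with a hypothetical example: given the
   constraints p = &a, p = q, r = *p and *p = &b, the first sets bit a
   directly in p's solution, the second adds a weight-0 copy edge from
   q to p, and the last two are queued on p's complex-constraint list,
   to be processed by the solver as p's solution grows.  */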


/* Changed variables on the last iteration.  */
static unsigned int changed_count;
static sbitmap changed;

DEF_VEC_I(unsigned);
DEF_VEC_ALLOC_I(unsigned,heap);


/* Strongly Connected Component visitation info.  */

struct scc_info
{
  sbitmap visited;
  sbitmap in_component;
  int current_index;
  unsigned int *visited_index;
  VEC(unsigned,heap) *scc_stack;
  VEC(unsigned,heap) *unification_queue;
};


/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  constraint_edge_t c;
  int i;

  gcc_assert (get_varinfo (n)->node == n);
  SET_BIT (si->visited, n);
  RESET_BIT (si->in_component, n);
  si->visited_index[n] = si->current_index++;

  /* Visit all the successors.  */
  for (i = 0; VEC_iterate (constraint_edge_t, graph->succs[n], i, c); i++)
    {
      /* We only want to find and collapse the zero weight edges. */
      if (bitmap_bit_p (c->weights, 0))
	{
	  unsigned int w = c->dest;
	  if (!TEST_BIT (si->visited, w))
	    scc_visit (graph, si, w);
	  if (!TEST_BIT (si->in_component, w))
	    {
	      unsigned int t = get_varinfo (w)->node;
	      unsigned int nnode = get_varinfo (n)->node;
	      if (si->visited_index[t] < si->visited_index[nnode])
		get_varinfo (n)->node = t;
	    }
	}
    }

  /* See if any components have been identified.  */
  if (get_varinfo (n)->node == n)
    {
      unsigned int t = si->visited_index[n];
      SET_BIT (si->in_component, n);
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && t < si->visited_index[VEC_last (unsigned, si->scc_stack)])
	{
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  get_varinfo (w)->node = n;
	  SET_BIT (si->in_component, w);
	  /* Mark this node for collapsing.  */
	  VEC_safe_push (unsigned, heap, si->unification_queue, w);
	}
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}


/* Collapse two variables into one variable.  */

static void
collapse_nodes (constraint_graph_t graph, unsigned int to, unsigned int from)
{
  bitmap tosol, fromsol;
  struct constraint_edge edge;


  condense_varmap_nodes (to, from);
  tosol = get_varinfo (to)->solution;
  fromsol = get_varinfo (from)->solution;
  bitmap_ior_into (tosol, fromsol);
  merge_graph_nodes (graph, to, from);
  edge.src = to;
  edge.dest = to;
  edge.weights = NULL;
  if (valid_graph_edge (graph, edge))
    {
      bitmap weights = get_graph_weights (graph, edge);
      bitmap_clear_bit (weights, 0);
      if (bitmap_empty_p (weights))
	erase_graph_self_edge (graph, edge);
    }
  bitmap_clear (fromsol);
  get_varinfo (to)->address_taken |= get_varinfo (from)->address_taken;
  get_varinfo (to)->indirect_target |= get_varinfo (from)->indirect_target;
}


/* Unify nodes in GRAPH that we have found to be part of a cycle.
   SI is the Strongly Connected Components information structure that tells us
   what components to unify.
   UPDATE_CHANGED should be set to true if the changed sbitmap and changed
   count should be updated to reflect the unification.  */

static void
process_unification_queue (constraint_graph_t graph, struct scc_info *si,
			   bool update_changed)
{
  size_t i = 0;
  bitmap tmp = BITMAP_ALLOC (update_changed ? &iteration_obstack : NULL);
  bitmap_clear (tmp);

  /* We proceed as follows:

     For each component in the queue (components are delineated by
     when current_queue_element->node != next_queue_element->node):

        rep = representative node for component

        For each node (tounify) to be unified in the component,
           merge the solution for tounify into tmp bitmap

           clear solution for tounify

           merge edges from tounify into rep

	   merge complex constraints from tounify into rep

	   update changed count to note that tounify will never change
	   again

	Merge tmp into solution for rep, marking rep changed if this
	changed rep's solution.

	Delete any 0 weighted self-edges we now have for rep.  */
  while (i != VEC_length (unsigned, si->unification_queue))
    {
      unsigned int tounify = VEC_index (unsigned, si->unification_queue, i);
      unsigned int n = get_varinfo (tounify)->node;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Unifying %s to %s\n",
		 get_varinfo (tounify)->name,
		 get_varinfo (n)->name);
      if (update_changed)
	stats.unified_vars_dynamic++;
      else
	stats.unified_vars_static++;
      bitmap_ior_into (tmp, get_varinfo (tounify)->solution);
      merge_graph_nodes (graph, n, tounify);
      condense_varmap_nodes (n, tounify);

      if (update_changed && TEST_BIT (changed, tounify))
	{
	  RESET_BIT (changed, tounify);
	  if (!TEST_BIT (changed, n))
	    SET_BIT (changed, n);
	  else
	    {
	      gcc_assert (changed_count > 0);
	      changed_count--;
	    }
	}

      bitmap_clear (get_varinfo (tounify)->solution);
      ++i;

      /* If we've either finished processing the entire queue, or
	 finished processing all nodes for component n, update the solution for
	 n.  */
      if (i == VEC_length (unsigned, si->unification_queue)
	  || get_varinfo (VEC_index (unsigned, si->unification_queue, i))->node != n)
	{
	  struct constraint_edge edge;

	  /* If the solution changes because of the merging, we need to mark
	     the variable as changed.  */
	  if (bitmap_ior_into (get_varinfo (n)->solution, tmp))
	    {
	      if (update_changed && !TEST_BIT (changed, n))
		{
		  SET_BIT (changed, n);
		  changed_count++;
		}
	    }
	  bitmap_clear (tmp);
	  edge.src = n;
	  edge.dest = n;
	  edge.weights = NULL;
	  if (valid_graph_edge (graph, edge))
	    {
	      bitmap weights = get_graph_weights (graph, edge);
	      bitmap_clear_bit (weights, 0);
	      if (bitmap_empty_p (weights))
		erase_graph_self_edge (graph, edge);
	    }
	}
    }
  BITMAP_FREE (tmp);
}


/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  VEC(unsigned,heap) *topo_order;
};


/* Initialize and return a topological info structure.  */

static struct topo_info *
init_topo_info (void)
{
  size_t size = VEC_length (varinfo_t, varmap);
  struct topo_info *ti = xmalloc (sizeof (struct topo_info));
  ti->visited = sbitmap_alloc (size);
  sbitmap_zero (ti->visited);
  ti->topo_order = VEC_alloc (unsigned, heap, 1);
  return ti;
}


/* Free the topological sort info pointed to by TI.  */

static void
free_topo_info (struct topo_info *ti)
{
  sbitmap_free (ti->visited);
  VEC_free (unsigned, heap, ti->topo_order);
  free (ti);
}

/* Visit the graph in topological order, and store the order in the
   topo_info structure.  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  VEC(constraint_edge_t,heap) *succs = graph->succs[n];
  constraint_edge_t c;
  int i;
  SET_BIT (ti->visited, n);
  for (i = 0; VEC_iterate (constraint_edge_t, succs, i, c); i++)
    {
      if (!TEST_BIT (ti->visited, c->dest))
	topo_visit (graph, ti, c->dest);
    }
  VEC_safe_push (unsigned, heap, ti->topo_order, n);
}

/* Return true if variable N + OFFSET is a legal field of N.  */

static bool
type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
{
  varinfo_t ninfo = get_varinfo (n);

  /* For things we've globbed to single variables, any offset into the
     variable acts like the entire variable, so that it becomes offset
     0.  */
  if (ninfo->is_special_var
      || ninfo->is_artificial_var
      || ninfo->is_unknown_size_var)
    {
      *offset = 0;
      return true;
    }
  return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
}
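
/* For instance, with the foo variable from the overview comment (two
   32 bit fields, fullsize 64): type_safe on foo.a with *OFFSET == 32
   returns true, since 0 + 32 < 64, while type_safe on foo.b with
   *OFFSET == 32 returns false, since 32 + 32 is not less than 64.
   For special, artificial or unknown-size variables the offset is
   forced to 0 and the access is always considered safe.  */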

/* Process a constraint C that represents *x = &y.  */

static void
do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
		  constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  unsigned int j;
  bitmap_iterator bi;

  /* For each member j of Delta (Sol(x)), add x to Sol(j)  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT offset = c->lhs.offset;
      if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
	{
	/* *x != NULL && *x != ANYTHING */
	  varinfo_t v;
	  unsigned int t;
	  bitmap sol;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = v->node;
	  sol = get_varinfo (t)->solution;
	  if (!bitmap_bit_p (sol, rhs))
	    {
	      bitmap_set_bit (sol, rhs);
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      else if (dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");

    }
}

/* Process a constraint C that represents x = *y, using DELTA as the
   starting solution.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = get_varinfo (c->lhs.var)->node;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT roffset = c->rhs.offset;
      if (type_safe (j, &roffset))
	{
	  varinfo_t v;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
	  unsigned int t;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = v->node;
	  if (int_add_graph_edge (graph, lhs, t, 0))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	}
      else if (dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");

    }

  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}
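
/* As a concrete (hypothetical) illustration of the above: for the
   constraint x = *y with Sol(y) currently { a, b }, do_sd_constraint
   adds zero-weight copy edges from a to x and from b to x, and unions
   Sol(a) and Sol(b) into Sol(x); if Sol(x) grew, x is marked changed
   so its own successors see the new bits on a later visit.  A nonzero
   offset on the RHS instead selects the field at that offset within
   each member of Sol(y).  */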

/* Process a constraint C that represents *x = y.  */

static void
do_ds_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
{
  unsigned int rhs = get_varinfo (c->rhs.var)->node;
  unsigned HOST_WIDE_INT roff = c->rhs.offset;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT loff = c->lhs.offset;
      if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = v->node;
	  if (int_add_graph_edge (graph, t, rhs, roff))
	    {
	      bitmap tmp = get_varinfo (t)->solution;
	      if (set_union_with_increment (tmp, sol, roff))
		{
		  get_varinfo (t)->solution = tmp;
		  if (t == rhs)
		    {
		      sol = get_varinfo (rhs)->solution;
		    }
		  if (!TEST_BIT (changed, t))
		    {
		      SET_BIT (changed, t);
		      changed_count++;
		    }
		}
	    }
	}
      else if (dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
    }
}

/* Handle a non-simple (simple meaning requires no iteration), non-copy
   constraint (IE *x = &y, x = *y, and *x = y).  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  /* *x = &y */
	  do_da_constraint (graph, c, delta);
	}
      else
	{
	  /* *x = y */
	  do_ds_constraint (graph, c, delta);
	}
    }
  else
    {
      /* x = *y */
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta);
    }
}

/* Initialize and return a new SCC info structure.  */

static struct scc_info *
init_scc_info (void)
{
  struct scc_info *si = xmalloc (sizeof (struct scc_info));
  size_t size = VEC_length (varinfo_t, varmap);

  si->current_index = 0;
  si->visited = sbitmap_alloc (size);
  sbitmap_zero (si->visited);
  si->in_component = sbitmap_alloc (size);
  sbitmap_ones (si->in_component);
  si->visited_index = xcalloc (sizeof (unsigned int), size + 1);
  si->scc_stack = VEC_alloc (unsigned, heap, 1);
  si->unification_queue = VEC_alloc (unsigned, heap, 1);
  return si;
}

/* Free an SCC info structure pointed to by SI */

static void
free_scc_info (struct scc_info *si)
{
  sbitmap_free (si->visited);
  sbitmap_free (si->in_component);
  free (si->visited_index);
  VEC_free (unsigned, heap, si->scc_stack);
  VEC_free (unsigned, heap, si->unification_queue);
  free (si);
}


/* Find cycles in GRAPH, using strongly connected components, and
   collapse each cycle into a single representative node.  If
   UPDATE_CHANGED is true, then update the changed sbitmap to note those
   nodes whose solutions have changed as a result of collapsing.  */

static void
find_and_collapse_graph_cycles (constraint_graph_t graph, bool update_changed)
{
  unsigned int i;
  unsigned int size = VEC_length (varinfo_t, varmap);
  struct scc_info *si = init_scc_info ();

  for (i = 0; i != size; ++i)
    if (!TEST_BIT (si->visited, i) && get_varinfo (i)->node == i)
      scc_visit (graph, si, i);
  process_unification_queue (graph, si, update_changed);
  free_scc_info (si);
}

/* Compute a topological ordering for GRAPH, and store the result in the
   topo_info structure TI.  */

static void
compute_topo_order (constraint_graph_t graph,
		    struct topo_info *ti)
{
  unsigned int i;
  unsigned int size = VEC_length (varinfo_t, varmap);

  for (i = 0; i != size; ++i)
    if (!TEST_BIT (ti->visited, i) && get_varinfo (i)->node == i)
      topo_visit (graph, ti, i);
}

/* Return true if a bit other than bit 0 is set in bitmap B.  */

static bool
bitmap_other_than_zero_bit_set (bitmap b)
{
  unsigned int i;
  bitmap_iterator bi;

  if (bitmap_empty_p (b))
    return false;
  EXECUTE_IF_SET_IN_BITMAP (b, 1, i, bi)
    return true;
  return false;
}

/* Perform offline variable substitution.

   This is a linear time way of identifying variables that must have
   equivalent points-to sets, including those caused by static cycles,
   and single entry subgraphs, in the constraint graph.

   The technique is described in "Off-line variable substitution for
   scaling points-to analysis" by Atanas Rountev and Satish Chandra,
   in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56.  */
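
/* As a rough (hypothetical) example: if the only constraint whose LHS
   is b is "b = a", b's address is never taken, b is never the target
   of a dereference, and the edge from a to b carries only weight 0,
   then Sol(b) is necessarily equal to Sol(a) in the final solution,
   so b can be collapsed into a before the solver ever runs.  */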

static void
perform_var_substitution (constraint_graph_t graph)
{
  struct topo_info *ti = init_topo_info ();

  /* Compute the topological ordering of the graph, then visit each
     node in topological order.  */
  compute_topo_order (graph, ti);

  while (VEC_length (unsigned, ti->topo_order) != 0)
    {
      unsigned int i = VEC_pop (unsigned, ti->topo_order);
      unsigned int pred;
      varinfo_t vi = get_varinfo (i);
      bool okay_to_elim = false;
      unsigned int root = VEC_length (varinfo_t, varmap);
      VEC(constraint_edge_t,heap) *predvec = graph->preds[i];
      constraint_edge_t ce;
      bitmap tmp;

      /* We can't eliminate things whose address is taken, or that are
	 the target of a dereference.  */
      if (vi->address_taken || vi->indirect_target)
	continue;

      /* See if all predecessors of I are ripe for elimination */
      for (pred = 0; VEC_iterate (constraint_edge_t, predvec, pred, ce); pred++)
	{
	  bitmap weight;
	  unsigned int w;
	  weight = get_graph_weights (graph, *ce);

	  /* We can't eliminate variables that have nonzero weighted
	     edges between them.  */
	  if (bitmap_other_than_zero_bit_set (weight))
	    {
	      okay_to_elim = false;
	      break;
	    }
	  w = get_varinfo (ce->dest)->node;

	  /* We can't eliminate the node if one of the predecessors is
	     part of a different strongly connected component.  */
	  if (!okay_to_elim)
	    {
	      root = w;
	      okay_to_elim = true;
	    }
	  else if (w != root)
	    {
	      okay_to_elim = false;
	      break;
	    }

	  /* Theorem 4 in Rountev and Chandra: If i is a direct node,
	     then Solution(i) is a subset of Solution (w), where w is a
	     predecessor in the graph.
	     Corollary: If all predecessors of i have the same
	     points-to set, then i has that same points-to set as
	     those predecessors.  */
	  tmp = BITMAP_ALLOC (NULL);
	  bitmap_and_compl (tmp, get_varinfo (i)->solution,
			    get_varinfo (w)->solution);
	  if (!bitmap_empty_p (tmp))
	    {
	      okay_to_elim = false;
	      BITMAP_FREE (tmp);
	      break;
	    }
	  BITMAP_FREE (tmp);
	}

      /* See if the root is different than the original node.
	 If so, we've found an equivalence.  */
      if (root != get_varinfo (i)->node && okay_to_elim)
	{
	  /* Found an equivalence */
	  get_varinfo (i)->node = root;
	  collapse_nodes (graph, root, i);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Collapsing %s into %s\n",
		     get_varinfo (i)->name,
		     get_varinfo (root)->name);
	  stats.collapsed_vars++;
	}
    }

  free_topo_info (ti);
}

/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;

  changed_count = size;
  changed = sbitmap_alloc (size);
  sbitmap_ones (changed);

  /* The already collapsed/unreachable nodes will never change, so we
     need to account for them in changed_count.  */
  for (i = 0; i < size; i++)
    if (get_varinfo (i)->node != i)
      changed_count--;

  while (changed_count > 0)
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      if (edge_added)
	{
	  /* We already did cycle elimination once, when we did
	     variable substitution, so we don't need it again for the
	     first iteration.  */
	  if (stats.iterations > 1)
	    find_and_collapse_graph_cycles (graph, true);

	  edge_added = false;
	}

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{
	  i = VEC_pop (unsigned, ti->topo_order);
	  gcc_assert (get_varinfo (i)->node == i);

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      constraint_edge_t e;
	      bitmap solution;
	      VEC(constraint_t,heap) *complex = get_varinfo (i)->complex;
	      VEC(constraint_edge_t,heap) *succs;

	      RESET_BIT (changed, i);
	      changed_count--;

	      /* Process the complex constraints */
	      solution = get_varinfo (i)->solution;
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
		do_complex_constraint (graph, c, solution);

	      /* Propagate solution to all successors.  */
	      succs = graph->succs[i];
	      for (j = 0; VEC_iterate (constraint_edge_t, succs, j, e); j++)
		{
		  bitmap tmp = get_varinfo (e->dest)->solution;
		  bool flag = false;
		  unsigned int k;
		  bitmap weights = e->weights;
		  bitmap_iterator bi;

		  gcc_assert (!bitmap_empty_p (weights));
		  EXECUTE_IF_SET_IN_BITMAP (weights, 0, k, bi)
		    flag |= set_union_with_increment (tmp, solution, k);

		  if (flag)
		    {
		      get_varinfo (e->dest)->solution = tmp;
		      if (!TEST_BIT (changed, e->dest))
			{
			  SET_BIT (changed, e->dest);
			  changed_count++;
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  sbitmap_free (changed);
}


1822/* CONSTRAINT AND VARIABLE GENERATION FUNCTIONS */
1823
1824/* Map from trees to variable ids.  */
1825static htab_t id_for_tree;
1826
1827typedef struct tree_id
1828{
1829  tree t;
1830  unsigned int id;
1831} *tree_id_t;
1832
1833/* Hash a tree id structure.  */
1834
1835static hashval_t
1836tree_id_hash (const void *p)
1837{
1838  const tree_id_t ta = (tree_id_t) p;
1839  return htab_hash_pointer (ta->t);
1840}
1841
1842/* Return true if the tree in P1 and the tree in P2 are the same.  */
1843
1844static int
1845tree_id_eq (const void *p1, const void *p2)
1846{
1847  const tree_id_t ta1 = (tree_id_t) p1;
1848  const tree_id_t ta2 = (tree_id_t) p2;
1849  return ta1->t == ta2->t;
1850}
1851
1852/* Insert ID as the variable id for tree T in the hashtable.  */
1853
1854static void
1855insert_id_for_tree (tree t, int id)
1856{
1857  void **slot;
1858  struct tree_id finder;
1859  tree_id_t new_pair;
1860
1861  finder.t = t;
1862  slot = htab_find_slot (id_for_tree, &finder, INSERT);
1863  gcc_assert (*slot == NULL);
1864  new_pair = xmalloc (sizeof (struct tree_id));
1865  new_pair->t = t;
1866  new_pair->id = id;
1867  *slot = (void *)new_pair;
1868}
1869
1870/* Find the variable id for tree T in ID_FOR_TREE.  If T does not
1871   exist in the hash table, return false, otherwise, return true and
1872   set *ID to the id we found.  */
1873
1874static bool
1875lookup_id_for_tree (tree t, unsigned int *id)
1876{
1877  tree_id_t pair;
1878  struct tree_id finder;
1879
1880  finder.t = t;
1881  pair = htab_find (id_for_tree,  &finder);
1882  if (pair == NULL)
1883    return false;
1884  *id = pair->id;
1885  return true;
1886}
1887
/* Return a printable name for DECL.  */
1889
1890static const char *
1891alias_get_name (tree decl)
1892{
1893  const char *res = get_name (decl);
1894  char *temp;
1895  int num_printed = 0;
1896
1897  if (res != NULL)
1898    return res;
1899
1900  res = "NULL";
1901  if (TREE_CODE (decl) == SSA_NAME)
1902    {
1903      num_printed = asprintf (&temp, "%s_%u",
1904			      alias_get_name (SSA_NAME_VAR (decl)),
1905			      SSA_NAME_VERSION (decl));
1906    }
1907  else if (DECL_P (decl))
1908    {
1909      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
1910    }
1911  if (num_printed > 0)
1912    {
1913      res = ggc_strdup (temp);
1914      free (temp);
1915    }
1916  return res;
1917}
1918
1919/* Find the variable id for tree T in the hashtable.
1920   If T doesn't exist in the hash table, create an entry for it.  */
1921
1922static unsigned int
1923get_id_for_tree (tree t)
1924{
1925  tree_id_t pair;
1926  struct tree_id finder;
1927
1928  finder.t = t;
1929  pair = htab_find (id_for_tree,  &finder);
1930  if (pair == NULL)
1931    return create_variable_info_for (t, alias_get_name (t));
1932
1933  return pair->id;
1934}
1935
1936/* Get a constraint expression from an SSA_VAR_P node.  */
1937
1938static struct constraint_expr
1939get_constraint_exp_from_ssa_var (tree t)
1940{
1941  struct constraint_expr cexpr;
1942
1943  gcc_assert (SSA_VAR_P (t) || DECL_P (t));
1944
1945  /* For parameters, get at the points-to set for the actual parm
1946     decl.  */
1947  if (TREE_CODE (t) == SSA_NAME
1948      && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
1949      && default_def (SSA_NAME_VAR (t)) == t)
1950    return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
1951
1952  cexpr.type = SCALAR;
1953
1954  cexpr.var = get_id_for_tree (t);
1955  /* If we determine the result is "anything", and we know this is readonly,
1956     say it points to readonly memory instead.  */
1957  if (cexpr.var == anything_id && TREE_READONLY (t))
1958    {
1959      cexpr.type = ADDRESSOF;
1960      cexpr.var = readonly_id;
1961    }
1962
1963  cexpr.offset = 0;
1964  return cexpr;
1965}
1966
1967/* Process a completed constraint T, and add it to the constraint
1968   list.  */
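/* For example, a GIMPLE assignment a = &b arrives here roughly as the
   constraint a = &b; the ADDRESSOF case below records it after marking b
   (and its fields) as having their address taken.  An assignment *x = *y
   is split into doubledereftmp = *y and *x = doubledereftmp, so that no
   single constraint ends up with a DEREF on both sides.  */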
1969
1970static void
1971process_constraint (constraint_t t)
1972{
1973  struct constraint_expr rhs = t->rhs;
1974  struct constraint_expr lhs = t->lhs;
1975
1976  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
1977  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
1978
1979  /* ANYTHING == ANYTHING is pointless.  */
1980  if (lhs.var == anything_id && rhs.var == anything_id)
1981    return;
1982
  /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING.  */
1984  else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
1985    {
1986      rhs = t->lhs;
1987      t->lhs = t->rhs;
1988      t->rhs = rhs;
1989      process_constraint (t);
1990    }
1991  /* This can happen in our IR with things like n->a = *p */
1992  else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
1993    {
1994      /* Split into tmp = *rhs, *lhs = tmp */
1995      tree rhsdecl = get_varinfo (rhs.var)->decl;
1996      tree pointertype = TREE_TYPE (rhsdecl);
1997      tree pointedtotype = TREE_TYPE (pointertype);
1998      tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
1999      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2000
2001      /* If this is an aggregate of known size, we should have passed
2002	 this off to do_structure_copy, and it should have broken it
2003	 up.  */
2004      gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2005		  || get_varinfo (rhs.var)->is_unknown_size_var);
2006
2007      process_constraint (new_constraint (tmplhs, rhs));
2008      process_constraint (new_constraint (lhs, tmplhs));
2009    }
2010  else if (rhs.type == ADDRESSOF)
2011    {
2012      varinfo_t vi;
2013      gcc_assert (rhs.offset == 0);
2014
2015      for (vi = get_varinfo (rhs.var); vi != NULL; vi = vi->next)
2016	vi->address_taken = true;
2017
2018      VEC_safe_push (constraint_t, heap, constraints, t);
2019    }
2020  else
2021    {
2022      if (lhs.type != DEREF && rhs.type == DEREF)
2023	get_varinfo (lhs.var)->indirect_target = true;
2024      VEC_safe_push (constraint_t, heap, constraints, t);
2025    }
2026}
2027
2028
2029/* Return the position, in bits, of FIELD_DECL from the beginning of its
2030   structure.  */
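/* For example, a field whose DECL_FIELD_OFFSET is 4 (bytes) and whose
   DECL_FIELD_BIT_OFFSET is 16 sits at bit position 4 * 8 + 16 == 48.  */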
2031
2032static unsigned HOST_WIDE_INT
2033bitpos_of_field (const tree fdecl)
2034{
2035
2036  if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2037      || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2038    return -1;
2039
2040  return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2041         + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2042}
2043
2044
2045/* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2046   overlaps with a field at [FIELDPOS, FIELDSIZE] */
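/* For example, a field at [32, 32] overlaps an access at [40, 8] (the
   access starts inside the field) and an access at [0, 40] (the access
   starts before the field but runs into it), but not an access at
   [0, 16].  */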
2047
2048static bool
2049offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2050			     const unsigned HOST_WIDE_INT fieldsize,
2051			     const unsigned HOST_WIDE_INT accesspos,
2052			     const unsigned HOST_WIDE_INT accesssize)
2053{
2054  if (fieldpos == accesspos && fieldsize == accesssize)
2055    return true;
2056  if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2057    return true;
2058  if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2059    return true;
2060
2061  return false;
2062}
2063
2064/* Given a COMPONENT_REF T, return the constraint_expr for it.  */
2065
2066static struct constraint_expr
2067get_constraint_for_component_ref (tree t, bool *need_anyoffset)
2068{
2069  struct constraint_expr result;
2070  HOST_WIDE_INT bitsize = -1;
2071  HOST_WIDE_INT bitpos;
2072  tree offset = NULL_TREE;
2073  enum machine_mode mode;
2074  int unsignedp;
2075  int volatilep;
2076  tree forzero;
2077
2078  result.offset = 0;
2079  result.type = SCALAR;
2080  result.var = 0;
2081
2082  /* Some people like to do cute things like take the address of
2083     &0->a.b */
2084  forzero = t;
2085  while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2086      forzero = TREE_OPERAND (forzero, 0);
2087
2088  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2089    {
2090      result.offset = 0;
2091      result.var = integer_id;
2092      result.type = SCALAR;
2093      return result;
2094    }
2095
2096  t = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2097			   &unsignedp, &volatilep, false);
2098  result = get_constraint_for (t, need_anyoffset);
2099
2100  /* This can also happen due to weird offsetof type macros.  */
2101  if (TREE_CODE (t) != ADDR_EXPR && result.type == ADDRESSOF)
2102    result.type = SCALAR;
2103
  /* If we know the constant offset of the access, record it.  Otherwise,
     signal the caller that any offset may be accessed, or fall back to
     ANYTHING if the caller cannot handle that.  */
2105
2106  if (offset == NULL && bitsize != -1)
2107    {
2108      result.offset = bitpos;
2109    }
2110  else if (need_anyoffset)
2111    {
2112      result.offset = 0;
2113      *need_anyoffset = true;
2114    }
2115  else
2116    {
2117      result.var = anything_id;
2118      result.offset = 0;
2119    }
2120
2121  if (result.type == SCALAR)
2122    {
2123      /* In languages like C, you can access one past the end of an
2124	 array.  You aren't allowed to dereference it, so we can
2125	 ignore this constraint. When we handle pointer subtraction,
2126	 we may have to do something cute here.  */
2127
2128      if (result.offset < get_varinfo (result.var)->fullsize
2129	  && bitsize != 0)
2130	{
	  /* The constraint may not actually start at the right offset;
	     it may start in some padding.  We only care about setting
	     the constraint to the first actual field it touches, so
	     walk to find it.  */
2135	  varinfo_t curr;
2136	  for (curr = get_varinfo (result.var); curr; curr = curr->next)
2137	    {
2138	      if (offset_overlaps_with_access (curr->offset, curr->size,
2139					       result.offset, bitsize))
2140		{
2141		  result.var = curr->id;
2142		  break;
2143
2144		}
2145	    }
2146	  /* assert that we found *some* field there. The user couldn't be
2147	     accessing *only* padding.  */
2148
2149	  gcc_assert (curr);
2150	}
2151      else if (bitsize == 0)
2152	{
2153	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable, "
		     "ignoring\n");
2156	}
2157      else
2158	if (dump_file && (dump_flags & TDF_DETAILS))
2159	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2160
2161      result.offset = 0;
2162    }
2163
2164  return result;
2165}
2166
2167
2168/* Dereference the constraint expression CONS, and return the result.
2169   DEREF (ADDRESSOF) = SCALAR
2170   DEREF (SCALAR) = DEREF
2171   DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2172   This is needed so that we can handle dereferencing DEREF constraints.  */
2173
2174static struct constraint_expr
2175do_deref (struct constraint_expr cons)
2176{
2177  if (cons.type == SCALAR)
2178    {
2179      cons.type = DEREF;
2180      return cons;
2181    }
2182  else if (cons.type == ADDRESSOF)
2183    {
2184      cons.type = SCALAR;
2185      return cons;
2186    }
2187  else if (cons.type == DEREF)
2188    {
2189      tree tmpvar = create_tmp_var_raw (ptr_type_node, "derefmp");
2190      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2191      process_constraint (new_constraint (tmplhs, cons));
2192      cons.var = tmplhs.var;
2193      return cons;
2194    }
2195  gcc_unreachable ();
2196}
2197
2198
2199/* Given a tree T, return the constraint expression for it.  */
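/* For example, under the cases below a plain variable x yields a SCALAR
   expression, *p yields a DEREF (via do_deref), &x yields an ADDRESSOF,
   and anything we cannot model, such as a cast of a non-pointer value to
   a pointer, conservatively yields &ANYTHING.  */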
2200
2201static struct constraint_expr
2202get_constraint_for (tree t, bool *need_anyoffset)
2203{
2204  struct constraint_expr temp;
2205
2206  /* x = integer is all glommed to a single variable, which doesn't
2207     point to anything by itself.  That is, of course, unless it is an
2208     integer constant being treated as a pointer, in which case, we
2209     will return that this is really the addressof anything.  This
2210     happens below, since it will fall into the default case. The only
2211     case we know something about an integer treated like a pointer is
2212     when it is the NULL pointer, and then we just say it points to
2213     NULL.  */
2214  if (TREE_CODE (t) == INTEGER_CST
2215      && !POINTER_TYPE_P (TREE_TYPE (t)))
2216    {
2217      temp.var = integer_id;
2218      temp.type = SCALAR;
2219      temp.offset = 0;
2220      return temp;
2221    }
2222  else if (TREE_CODE (t) == INTEGER_CST
2223	   && integer_zerop (t))
2224    {
2225      temp.var = nothing_id;
2226      temp.type = ADDRESSOF;
2227      temp.offset = 0;
2228      return temp;
2229    }
2230
2231  switch (TREE_CODE_CLASS (TREE_CODE (t)))
2232    {
2233    case tcc_expression:
2234      {
2235	switch (TREE_CODE (t))
2236	  {
2237	  case ADDR_EXPR:
2238	    {
2239	      temp = get_constraint_for (TREE_OPERAND (t, 0), need_anyoffset);
2240	       if (temp.type == DEREF)
2241		 temp.type = SCALAR;
2242	       else
2243		 temp.type = ADDRESSOF;
2244	      return temp;
2245	    }
2246	    break;
2247	  case CALL_EXPR:
2248
	    /* XXX: In interprocedural mode, if we didn't have the
	       body of the callee, we would need to add ARG = &ANYTHING
	       for each pointer argument ARG.  */
2252	    if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2253	      {
2254		varinfo_t vi;
2255		tree heapvar = heapvar_lookup (t);
2256
2257		if (heapvar == NULL)
2258		  {
2259		    heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2260		    DECL_EXTERNAL (heapvar) = 1;
2261		    add_referenced_tmp_var (heapvar);
2262		    heapvar_insert (t, heapvar);
2263		  }
2264
2265		temp.var = create_variable_info_for (heapvar,
2266						     alias_get_name (heapvar));
2267
2268		vi = get_varinfo (temp.var);
2269		vi->is_artificial_var = 1;
2270		vi->is_heap_var = 1;
2271		temp.type = ADDRESSOF;
2272		temp.offset = 0;
2273		return temp;
2274	      }
2275	    /* FALLTHRU */
2276	  default:
2277	    {
2278	      temp.type = ADDRESSOF;
2279	      temp.var = anything_id;
2280	      temp.offset = 0;
2281	      return temp;
2282	    }
2283	  }
2284      }
2285    case tcc_reference:
2286      {
2287	switch (TREE_CODE (t))
2288	  {
2289	  case INDIRECT_REF:
2290	    {
2291	      temp = get_constraint_for (TREE_OPERAND (t, 0), need_anyoffset);
2292	      temp = do_deref (temp);
2293	      return temp;
2294	    }
2295	  case ARRAY_REF:
2296	  case ARRAY_RANGE_REF:
2297	  case COMPONENT_REF:
2298	    temp = get_constraint_for_component_ref (t, need_anyoffset);
2299	    return temp;
2300	  default:
2301	    {
2302	      temp.type = ADDRESSOF;
2303	      temp.var = anything_id;
2304	      temp.offset = 0;
2305	      return temp;
2306	    }
2307	  }
2308      }
2309    case tcc_unary:
2310      {
2311	switch (TREE_CODE (t))
2312	  {
2313	  case NOP_EXPR:
2314	  case CONVERT_EXPR:
2315	  case NON_LVALUE_EXPR:
2316	    {
2317	      tree op = TREE_OPERAND (t, 0);
2318
	      /* Casts from non-pointers to pointers are bad news for us.
		 Anything else we can see through.  */
2321	      if (!(POINTER_TYPE_P (TREE_TYPE (t))
2322		    && ! POINTER_TYPE_P (TREE_TYPE (op))))
2323		return get_constraint_for (op, need_anyoffset);
2324
2325	      /* FALLTHRU  */
2326	    }
2327	  default:
2328	    {
2329	      temp.type = ADDRESSOF;
2330	      temp.var = anything_id;
2331	      temp.offset = 0;
2332	      return temp;
2333	    }
2334	  }
2335      }
2336    case tcc_exceptional:
2337      {
2338	switch (TREE_CODE (t))
2339	  {
2340	  case PHI_NODE:
2341	    return get_constraint_for (PHI_RESULT (t), need_anyoffset);
2342	  case SSA_NAME:
2343	    return get_constraint_exp_from_ssa_var (t);
2344	  default:
2345	    {
2346	      temp.type = ADDRESSOF;
2347	      temp.var = anything_id;
2348	      temp.offset = 0;
2349	      return temp;
2350	    }
2351	  }
2352      }
2353    case tcc_declaration:
2354      return get_constraint_exp_from_ssa_var (t);
2355    default:
2356      {
2357	temp.type = ADDRESSOF;
2358	temp.var = anything_id;
2359	temp.offset = 0;
2360	return temp;
2361      }
2362    }
2363}
2364
2365
2366/* Handle the structure copy case where we have a simple structure copy
2367   between LHS and RHS that is of SIZE (in bits)
2368
2369   For each field of the lhs variable (lhsfield)
2370     For each field of the rhs variable at lhsfield.offset (rhsfield)
2371       add the constraint lhsfield = rhsfield
2372
2373   If we fail due to some kind of type unsafety or other thing we
2374   can't handle, return false.  We expect the caller to collapse the
2375   variable in that case.  */
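/* For example, for a copy x = y where both variables have the fields a
   and b, this generates roughly the constraints x.a = y.a and x.b = y.b,
   pairing each field of the LHS with the RHS field at the same relative
   offset.  */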
2376
2377static bool
2378do_simple_structure_copy (const struct constraint_expr lhs,
2379			  const struct constraint_expr rhs,
2380			  const unsigned HOST_WIDE_INT size)
2381{
2382  varinfo_t p = get_varinfo (lhs.var);
2383  unsigned HOST_WIDE_INT pstart, last;
2384  pstart = p->offset;
2385  last = p->offset + size;
2386  for (; p && p->offset < last; p = p->next)
2387    {
2388      varinfo_t q;
2389      struct constraint_expr templhs = lhs;
2390      struct constraint_expr temprhs = rhs;
2391      unsigned HOST_WIDE_INT fieldoffset;
2392
2393      templhs.var = p->id;
2394      q = get_varinfo (temprhs.var);
2395      fieldoffset = p->offset - pstart;
2396      q = first_vi_for_offset (q, q->offset + fieldoffset);
2397      if (!q)
2398	return false;
2399      temprhs.var = q->id;
2400      process_constraint (new_constraint (templhs, temprhs));
2401    }
2402  return true;
2403}
2404
2405
/* Handle the structure copy case where we have a structure copy between an
   aggregate on the LHS and a dereference of a pointer on the RHS
   that is of SIZE (in bits).
2409
2410   For each field of the lhs variable (lhsfield)
2411       rhs.offset = lhsfield->offset
2412       add the constraint lhsfield = rhs
2413*/
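/* For example, for x = *p where x has the fields a (at offset 0) and b,
   this emits x.a = *p and x.b = *p, with the RHS offset of the second
   constraint advanced to b's offset within the structure.  */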
2414
2415static void
2416do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2417			     const struct constraint_expr rhs,
2418			     const unsigned HOST_WIDE_INT size)
2419{
2420  varinfo_t p = get_varinfo (lhs.var);
2421  unsigned HOST_WIDE_INT pstart,last;
2422  pstart = p->offset;
2423  last = p->offset + size;
2424
2425  for (; p && p->offset < last; p = p->next)
2426    {
2427      varinfo_t q;
2428      struct constraint_expr templhs = lhs;
2429      struct constraint_expr temprhs = rhs;
2430      unsigned HOST_WIDE_INT fieldoffset;
2431
2432
2433      if (templhs.type == SCALAR)
2434	templhs.var = p->id;
2435      else
2436	templhs.offset = p->offset;
2437
2438      q = get_varinfo (temprhs.var);
2439      fieldoffset = p->offset - pstart;
2440      temprhs.offset += fieldoffset;
2441      process_constraint (new_constraint (templhs, temprhs));
2442    }
2443}
2444
/* Handle the structure copy case where we have a structure copy
   between an aggregate on the RHS and a dereference of a pointer on
   the LHS that is of SIZE (in bits).
2448
2449   For each field of the rhs variable (rhsfield)
2450       lhs.offset = rhsfield->offset
2451       add the constraint lhs = rhsfield
2452*/
2453
2454static void
2455do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2456			     const struct constraint_expr rhs,
2457			     const unsigned HOST_WIDE_INT size)
2458{
2459  varinfo_t p = get_varinfo (rhs.var);
2460  unsigned HOST_WIDE_INT pstart,last;
2461  pstart = p->offset;
2462  last = p->offset + size;
2463
2464  for (; p && p->offset < last; p = p->next)
2465    {
2466      varinfo_t q;
2467      struct constraint_expr templhs = lhs;
2468      struct constraint_expr temprhs = rhs;
2469      unsigned HOST_WIDE_INT fieldoffset;
2470
2471
2472      if (temprhs.type == SCALAR)
2473	temprhs.var = p->id;
2474      else
2475	temprhs.offset = p->offset;
2476
2477      q = get_varinfo (templhs.var);
2478      fieldoffset = p->offset - pstart;
2479      templhs.offset += fieldoffset;
2480      process_constraint (new_constraint (templhs, temprhs));
2481    }
2482}
2483
2484/* Sometimes, frontends like to give us bad type information.  This
2485   function will collapse all the fields from VAR to the end of VAR,
2486   into VAR, so that we treat those fields as a single variable.
2487   We return the variable they were collapsed into.  */
2488
2489static unsigned int
2490collapse_rest_of_var (unsigned int var)
2491{
2492  varinfo_t currvar = get_varinfo (var);
2493  varinfo_t field;
2494
2495  for (field = currvar->next; field; field = field->next)
2496    {
2497      if (dump_file)
2498	fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2499		 field->name, currvar->name);
2500
2501      gcc_assert (!field->collapsed_to);
2502      field->collapsed_to = currvar;
2503    }
2504
2505  currvar->next = NULL;
2506  currvar->size = currvar->fullsize - currvar->offset;
2507
2508  return currvar->id;
2509}
2510
2511/* Handle aggregate copies by expanding into copies of the respective
2512   fields of the structures.  */
2513
2514static void
2515do_structure_copy (tree lhsop, tree rhsop)
2516{
2517  struct constraint_expr lhs, rhs, tmp;
2518  varinfo_t p;
2519  unsigned HOST_WIDE_INT lhssize;
2520  unsigned HOST_WIDE_INT rhssize;
2521
2522  lhs = get_constraint_for (lhsop, NULL);
2523  rhs = get_constraint_for (rhsop, NULL);
2524
2525  /* If we have special var = x, swap it around.  */
2526  if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2527    {
2528      tmp = lhs;
2529      lhs = rhs;
2530      rhs = tmp;
2531    }
2532
2533  /*  This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2534      possible it's something we could handle.  However, most cases falling
2535      into this are dealing with transparent unions, which are slightly
2536      weird. */
2537  if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2538    {
2539      rhs.type = ADDRESSOF;
2540      rhs.var = anything_id;
2541    }
2542
2543  /* If the RHS is a special var, or an addressof, set all the LHS fields to
2544     that special var.  */
2545  if (rhs.var <= integer_id)
2546    {
2547      for (p = get_varinfo (lhs.var); p; p = p->next)
2548	{
2549	  struct constraint_expr templhs = lhs;
2550	  struct constraint_expr temprhs = rhs;
2551	  if (templhs.type == SCALAR )
2552	    templhs.var = p->id;
2553	  else
2554	    templhs.offset += p->offset;
2555	  process_constraint (new_constraint (templhs, temprhs));
2556	}
2557    }
2558  else
2559    {
2560      tree rhstype = TREE_TYPE (rhsop);
2561      tree lhstype = TREE_TYPE (lhsop);
2562      tree rhstypesize = TYPE_SIZE (rhstype);
2563      tree lhstypesize = TYPE_SIZE (lhstype);
2564
      /* If we have a variably sized type on the rhs or lhs, and a deref
2566	 constraint, add the constraint, lhsconstraint = &ANYTHING.
2567	 This is conservatively correct because either the lhs is an unknown
2568	 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2569	 constraint, and every variable it can point to must be unknown sized
2570	 anyway, so we don't need to worry about fields at all.  */
2571      if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2572	  || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2573	{
2574	  rhs.var = anything_id;
2575	  rhs.type = ADDRESSOF;
2576	  rhs.offset = 0;
2577	  process_constraint (new_constraint (lhs, rhs));
2578	  return;
2579	}
2580
2581      /* The size only really matters insofar as we don't set more or less of
2582	 the variable.  If we hit an unknown size var, the size should be the
2583	 whole darn thing.  */
2584      if (get_varinfo (rhs.var)->is_unknown_size_var)
2585	rhssize = ~0;
2586      else
2587	rhssize = TREE_INT_CST_LOW (rhstypesize);
2588
2589      if (get_varinfo (lhs.var)->is_unknown_size_var)
2590	lhssize = ~0;
2591      else
2592	lhssize = TREE_INT_CST_LOW (lhstypesize);
2593
2594
2595      if (rhs.type == SCALAR && lhs.type == SCALAR)
2596	{
2597	  if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
2598	    {
2599	      lhs.var = collapse_rest_of_var (lhs.var);
2600	      rhs.var = collapse_rest_of_var (rhs.var);
2601	      lhs.offset = 0;
2602	      rhs.offset = 0;
2603	      lhs.type = SCALAR;
2604	      rhs.type = SCALAR;
2605	      process_constraint (new_constraint (lhs, rhs));
2606	    }
2607	}
2608      else if (lhs.type != DEREF && rhs.type == DEREF)
2609	do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2610      else if (lhs.type == DEREF && rhs.type != DEREF)
2611	do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2612      else
2613	{
2614	  tree pointedtotype = lhstype;
2615	  tree tmpvar;
2616
2617	  gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
2618	  tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
2619	  do_structure_copy (tmpvar, rhsop);
2620	  do_structure_copy (lhsop, tmpvar);
2621	}
2622    }
2623}
2624
2625/* Update related alias information kept in AI.  This is used when
2626   building name tags, alias sets and deciding grouping heuristics.
2627   STMT is the statement to process.  This function also updates
2628   ADDRESSABLE_VARS.  */
2629
2630static void
2631update_alias_info (tree stmt, struct alias_info *ai)
2632{
2633  bitmap addr_taken;
2634  use_operand_p use_p;
2635  ssa_op_iter iter;
2636  bool stmt_escapes_p = is_escape_site (stmt, ai);
2637  tree op;
2638
2639  /* Mark all the variables whose address are taken by the statement.  */
2640  addr_taken = addresses_taken (stmt);
2641  if (addr_taken)
2642    {
2643      bitmap_ior_into (addressable_vars, addr_taken);
2644
2645      /* If STMT is an escape point, all the addresses taken by it are
2646	 call-clobbered.  */
2647      if (stmt_escapes_p)
2648	{
2649	  bitmap_iterator bi;
2650	  unsigned i;
2651
2652	  EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
2653	    mark_call_clobbered (referenced_var (i));
2654	}
2655    }
2656
2657  /* Process each operand use.  If an operand may be aliased, keep
2658     track of how many times it's being used.  For pointers, determine
2659     whether they are dereferenced by the statement, or whether their
2660     value escapes, etc.  */
2661  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
2662    {
2663      tree op, var;
2664      var_ann_t v_ann;
2665      struct ptr_info_def *pi;
2666      bool is_store, is_potential_deref;
2667      unsigned num_uses, num_derefs;
2668
2669      op = USE_FROM_PTR (use_p);
2670
2671      /* If STMT is a PHI node, OP may be an ADDR_EXPR.  If so, add it
2672	 to the set of addressable variables.  */
2673      if (TREE_CODE (op) == ADDR_EXPR)
2674	{
2675	  gcc_assert (TREE_CODE (stmt) == PHI_NODE);
2676
2677	  /* PHI nodes don't have annotations for pinning the set
2678	     of addresses taken, so we collect them here.
2679
2680	     FIXME, should we allow PHI nodes to have annotations
2681	     so that they can be treated like regular statements?
2682	     Currently, they are treated as second-class
2683	     statements.  */
2684	  add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
2685	  continue;
2686	}
2687
2688      /* Ignore constants.  */
2689      if (TREE_CODE (op) != SSA_NAME)
2690	continue;
2691
2692      var = SSA_NAME_VAR (op);
2693      v_ann = var_ann (var);
2694
2695      /* If the operand's variable may be aliased, keep track of how
2696	 many times we've referenced it.  This is used for alias
2697	 grouping in compute_flow_insensitive_aliasing.  */
2698      if (may_be_aliased (var))
2699	NUM_REFERENCES_INC (v_ann);
2700
2701      /* We are only interested in pointers.  */
2702      if (!POINTER_TYPE_P (TREE_TYPE (op)))
2703	continue;
2704
2705      pi = get_ptr_info (op);
2706
2707      /* Add OP to AI->PROCESSED_PTRS, if it's not there already.  */
2708      if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
2709	{
2710	  SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
2711	  VARRAY_PUSH_TREE (ai->processed_ptrs, op);
2712	}
2713
2714      /* If STMT is a PHI node, then it will not have pointer
2715	 dereferences and it will not be an escape point.  */
2716      if (TREE_CODE (stmt) == PHI_NODE)
2717	continue;
2718
2719      /* Determine whether OP is a dereferenced pointer, and if STMT
2720	 is an escape point, whether OP escapes.  */
2721      count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);
2722
2723      /* Handle a corner case involving address expressions of the
2724	 form '&PTR->FLD'.  The problem with these expressions is that
2725	 they do not represent a dereference of PTR.  However, if some
2726	 other transformation propagates them into an INDIRECT_REF
2727	 expression, we end up with '*(&PTR->FLD)' which is folded
2728	 into 'PTR->FLD'.
2729
2730	 So, if the original code had no other dereferences of PTR,
2731	 the aliaser will not create memory tags for it, and when
2732	 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
2733	 memory operations will receive no V_MAY_DEF/VUSE operands.
2734
2735	 One solution would be to have count_uses_and_derefs consider
2736	 &PTR->FLD a dereference of PTR.  But that is wrong, since it
2737	 is not really a dereference but an offset calculation.
2738
2739	 What we do here is to recognize these special ADDR_EXPR
2740	 nodes.  Since these expressions are never GIMPLE values (they
2741	 are not GIMPLE invariants), they can only appear on the RHS
2742	 of an assignment and their base address is always an
2743	 INDIRECT_REF expression.  */
2744      is_potential_deref = false;
2745      if (TREE_CODE (stmt) == MODIFY_EXPR
2746	  && TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR
2747	  && !is_gimple_val (TREE_OPERAND (stmt, 1)))
2748	{
	  /* If the RHS is of the form &PTR->FLD and PTR == OP, then
2750	     this represents a potential dereference of PTR.  */
2751	  tree rhs = TREE_OPERAND (stmt, 1);
2752	  tree base = get_base_address (TREE_OPERAND (rhs, 0));
2753	  if (TREE_CODE (base) == INDIRECT_REF
2754	      && TREE_OPERAND (base, 0) == op)
2755	    is_potential_deref = true;
2756	}
2757
2758      if (num_derefs > 0 || is_potential_deref)
2759	{
2760	  /* Mark OP as dereferenced.  In a subsequent pass,
2761	     dereferenced pointers that point to a set of
2762	     variables will be assigned a name tag to alias
2763	     all the variables OP points to.  */
2764	  pi->is_dereferenced = 1;
2765
2766	  /* Keep track of how many time we've dereferenced each
2767	     pointer.  */
2768	  NUM_REFERENCES_INC (v_ann);
2769
2770	  /* If this is a store operation, mark OP as being
2771	     dereferenced to store, otherwise mark it as being
2772	     dereferenced to load.  */
2773	  if (is_store)
2774	    bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
2775	  else
2776	    bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
2777	}
2778
2779      if (stmt_escapes_p && num_derefs < num_uses)
2780	{
2781	  /* If STMT is an escape point and STMT contains at
2782	     least one direct use of OP, then the value of OP
2783	     escapes and so the pointed-to variables need to
2784	     be marked call-clobbered.  */
2785	  pi->value_escapes_p = 1;
2786
2787	  /* If the statement makes a function call, assume
2788	     that pointer OP will be dereferenced in a store
2789	     operation inside the called function.  */
2790	  if (get_call_expr_in (stmt))
2791	    {
2792	      bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
2793	      pi->is_dereferenced = 1;
2794	    }
2795	}
2796    }
2797
2798  if (TREE_CODE (stmt) == PHI_NODE)
2799    return;
2800
2801  /* Update reference counter for definitions to any
2802     potentially aliased variable.  This is used in the alias
2803     grouping heuristics.  */
2804  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
2805    {
2806      tree var = SSA_NAME_VAR (op);
2807      var_ann_t ann = var_ann (var);
2808      bitmap_set_bit (ai->written_vars, DECL_UID (var));
2809      if (may_be_aliased (var))
2810	NUM_REFERENCES_INC (ann);
2811
2812    }
2813
2814  /* Mark variables in V_MAY_DEF operands as being written to.  */
2815  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
2816    {
2817      tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
2818      bitmap_set_bit (ai->written_vars, DECL_UID (var));
2819    }
2820}
2821
2822
2823/* Handle pointer arithmetic EXPR when creating aliasing constraints.
2824   Expressions of the type PTR + CST can be handled in two ways:
2825
2826   1- If the constraint for PTR is ADDRESSOF for a non-structure
2827      variable, then we can use it directly because adding or
2828      subtracting a constant may not alter the original ADDRESSOF
2829      constraint (i.e., pointer arithmetic may not legally go outside
2830      an object's boundaries).
2831
2832   2- If the constraint for PTR is ADDRESSOF for a structure variable,
2833      then if CST is a compile-time constant that can be used as an
2834      offset, we can determine which sub-variable will be pointed-to
2835      by the expression.
2836
2837   Return true if the expression is handled.  For any other kind of
2838   expression, return false so that each operand can be added as a
2839   separate constraint by the caller.  */
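/* For example, in the current implementation below, for q = p + 4 we add
   the constraints q = p and q = &ANYOFFSET, so that q may point anywhere
   within the objects that p points to.  */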
2840
2841static bool
2842handle_ptr_arith (struct constraint_expr lhs, tree expr)
2843{
2844  tree op0, op1;
2845  struct constraint_expr base, offset;
2846
2847  if (TREE_CODE (expr) != PLUS_EXPR
2848      && TREE_CODE (expr) != MINUS_EXPR)
2849    return false;
2850
2851  op0 = TREE_OPERAND (expr, 0);
2852  op1 = TREE_OPERAND (expr, 1);
2853
2854  base = get_constraint_for (op0, NULL);
2855
2856  offset.var = anyoffset_id;
2857  offset.type = ADDRESSOF;
2858  offset.offset = 0;
2859
2860  process_constraint (new_constraint (lhs, base));
2861  process_constraint (new_constraint (lhs, offset));
2862
2863  return true;
2864}
2865
2866
2867/* Walk statement T setting up aliasing constraints according to the
2868   references found in T.  This function is the main part of the
2869   constraint builder.  AI points to auxiliary alias information used
2870   when building alias sets and computing alias grouping heuristics.  */
2871
2872static void
2873find_func_aliases (tree t, struct alias_info *ai)
2874{
2875  struct constraint_expr lhs, rhs;
2876
2877  /* Update various related attributes like escaped addresses, pointer
2878     dereferences for loads and stores.  This is used when creating
2879     name tags and alias sets.  */
2880  update_alias_info (t, ai);
2881
2882  /* Now build constraints expressions.  */
2883  if (TREE_CODE (t) == PHI_NODE)
2884    {
2885      /* Only care about pointers and structures containing
2886	 pointers.  */
2887      if (POINTER_TYPE_P (TREE_TYPE (PHI_RESULT (t)))
2888	  || AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))))
2889	{
2890	  int i;
2891
2892	  lhs = get_constraint_for (PHI_RESULT (t), NULL);
2893	  for (i = 0; i < PHI_NUM_ARGS (t); i++)
2894	    {
2895	      bool need_anyoffset = false;
2896	      tree anyoffsetrhs = PHI_ARG_DEF (t, i);
2897
2898	      rhs = get_constraint_for (PHI_ARG_DEF (t, i), &need_anyoffset);
2899	      process_constraint (new_constraint (lhs, rhs));
2900
2901	      STRIP_NOPS (anyoffsetrhs);
2902	      /* When taking the address of an aggregate
2903	         type, from the LHS we can access any field
2904	         of the RHS.  */
2905	      if (need_anyoffset || (rhs.type == ADDRESSOF
2906		  && !(get_varinfo (rhs.var)->is_special_var)
2907		  && AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (anyoffsetrhs)))))
2908		{
2909		  rhs.var = anyoffset_id;
2910		  rhs.type = ADDRESSOF;
2911		  rhs.offset = 0;
2912		  process_constraint (new_constraint (lhs, rhs));
2913		}
2914	    }
2915	}
2916    }
2917  else if (TREE_CODE (t) == MODIFY_EXPR)
2918    {
2919      tree lhsop = TREE_OPERAND (t, 0);
2920      tree rhsop = TREE_OPERAND (t, 1);
2921      int i;
2922
2923      if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
2924	  && AGGREGATE_TYPE_P (TREE_TYPE (rhsop)))
2925	{
2926	  do_structure_copy (lhsop, rhsop);
2927	}
2928      else
2929	{
2930	  /* Only care about operations with pointers, structures
2931	     containing pointers, dereferences, and call expressions.  */
2932	  if (POINTER_TYPE_P (TREE_TYPE (lhsop))
2933	      || AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
2934	      || TREE_CODE (rhsop) == CALL_EXPR)
2935	    {
2936	      lhs = get_constraint_for (lhsop, NULL);
2937	      switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
2938		{
		  /* RHSs that consist of unary operations,
		     exceptional types, or bare decls/constants get
		     handled directly by get_constraint_for.  */
2942		  case tcc_reference:
2943		  case tcc_declaration:
2944		  case tcc_constant:
2945		  case tcc_exceptional:
2946		  case tcc_expression:
2947		  case tcc_unary:
2948		      {
2949			tree anyoffsetrhs = rhsop;
2950			bool need_anyoffset = false;
2951			rhs = get_constraint_for (rhsop, &need_anyoffset);
2952			process_constraint (new_constraint (lhs, rhs));
2953
2954			STRIP_NOPS (anyoffsetrhs);
2955			/* When taking the address of an aggregate
2956			   type, from the LHS we can access any field
2957			   of the RHS.  */
2958			if (need_anyoffset || (rhs.type == ADDRESSOF
2959			    && !(get_varinfo (rhs.var)->is_special_var)
2960			    && (POINTER_TYPE_P (TREE_TYPE (anyoffsetrhs))
2961				|| TREE_CODE (TREE_TYPE (anyoffsetrhs))
2962				   == ARRAY_TYPE)
2963			    && AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (anyoffsetrhs)))))
2964			  {
2965			    rhs.var = anyoffset_id;
2966			    rhs.type = ADDRESSOF;
2967			    rhs.offset = 0;
2968			    process_constraint (new_constraint (lhs, rhs));
2969			  }
2970		      }
2971		    break;
2972
2973		  case tcc_binary:
2974		      {
2975			/* For pointer arithmetic of the form
2976			   PTR + CST, we can simply use PTR's
2977			   constraint because pointer arithmetic is
2978			   not allowed to go out of bounds.  */
2979			if (handle_ptr_arith (lhs, rhsop))
2980			  break;
2981		      }
2982		    /* FALLTHRU  */
2983
2984		  /* Otherwise, walk each operand.  Notice that we
2985		     can't use the operand interface because we need
2986		     to process expressions other than simple operands
2987		     (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR).  */
2988		  default:
2989		    for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
2990		      {
2991			tree op = TREE_OPERAND (rhsop, i);
2992			rhs = get_constraint_for (op, NULL);
2993			process_constraint (new_constraint (lhs, rhs));
2994		      }
2995		}
2996	    }
2997	}
2998    }
2999
3000  /* After promoting variables and computing aliasing we will
3001     need to re-scan most statements.  FIXME: Try to minimize the
3002     number of statements re-scanned.  It's not really necessary to
3003     re-scan *all* statements.  */
3004  mark_stmt_modified (t);
3005}
3006
3007
3008/* Find the first varinfo in the same variable as START that overlaps with
3009   OFFSET.
3010   Effectively, walk the chain of fields for the variable START to find the
3011   first field that overlaps with OFFSET.
3012   Return NULL if we can't find one.  */
3013
3014static varinfo_t
3015first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3016{
3017  varinfo_t curr = start;
3018  while (curr)
3019    {
3020      /* We may not find a variable in the field list with the actual
	 offset when we have glommed a structure to a variable.
3022	 In that case, however, offset should still be within the size
3023	 of the variable. */
3024      if (offset >= curr->offset && offset < (curr->offset +  curr->size))
3025	return curr;
3026      curr = curr->next;
3027    }
3028  return NULL;
3029}
3030
3031
3032/* Insert the varinfo FIELD into the field list for BASE, ordered by
3033   offset.  */
3034
3035static void
3036insert_into_field_list (varinfo_t base, varinfo_t field)
3037{
3038  varinfo_t prev = base;
3039  varinfo_t curr = base->next;
3040
3041  if (curr == NULL)
3042    {
3043      prev->next = field;
3044      field->next = NULL;
3045    }
3046  else
3047    {
3048      while (curr)
3049	{
3050	  if (field->offset <= curr->offset)
3051	    break;
3052	  prev = curr;
3053	  curr = curr->next;
3054	}
3055      field->next = prev->next;
3056      prev->next = field;
3057    }
3058}
3059
3060/* qsort comparison function for two fieldoff's PA and PB */
3061
3062static int
3063fieldoff_compare (const void *pa, const void *pb)
3064{
3065  const fieldoff_s *foa = (const fieldoff_s *)pa;
3066  const fieldoff_s *fob = (const fieldoff_s *)pb;
3067  HOST_WIDE_INT foasize, fobsize;
3068
3069  if (foa->offset != fob->offset)
3070    return foa->offset - fob->offset;
3071
3072  foasize = TREE_INT_CST_LOW (DECL_SIZE (foa->field));
3073  fobsize = TREE_INT_CST_LOW (DECL_SIZE (fob->field));
3074  return foasize - fobsize;
3075}
3076
/* Sort a fieldstack according to the field offsets and sizes.  */

void
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3079{
3080  qsort (VEC_address (fieldoff_s, fieldstack),
3081	 VEC_length (fieldoff_s, fieldstack),
3082	 sizeof (fieldoff_s),
3083	 fieldoff_compare);
3084}
3085
3086/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3087   of TYPE onto fieldstack, recording their offsets along the way.
3088   OFFSET is used to keep track of the offset in this entire structure, rather
3089   than just the immediately containing structure.  Returns the number
3090   of fields pushed.
3091   HAS_UNION is set to true if we find a union type as a field of
3092   TYPE.  */
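/* For example, for struct { int a; struct { int b; int c; } s; } (assuming
   a 32-bit int), this pushes entries for a at offset 0, b at offset 32, and
   c at offset 64, since the nested structure is walked recursively.  */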
3093
3094int
3095push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
3096			     HOST_WIDE_INT offset, bool *has_union)
3097{
3098  tree field;
3099  int count = 0;
3100
3101  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3102    if (TREE_CODE (field) == FIELD_DECL)
3103      {
3104	bool push = false;
3105	int pushed = 0;
3106
3107	if (has_union
3108	    && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
3109		|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
3110	  *has_union = true;
3111
3112	if (!var_can_have_subvars (field))
3113	  push = true;
3114	else if (!(pushed = push_fields_onto_fieldstack
3115		   (TREE_TYPE (field), fieldstack,
3116		    offset + bitpos_of_field (field), has_union))
3117		 && DECL_SIZE (field)
3118		 && !integer_zerop (DECL_SIZE (field)))
	  /* Empty structures may have actual size, like in C++.  So
	     if we didn't push any subfields and the size is nonzero,
	     push the field itself onto the stack.  */
3122	  push = true;
3123
3124	if (push)
3125	  {
3126	    fieldoff_s *pair;
3127
3128	    pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3129	    pair->field = field;
3130	    pair->offset = offset + bitpos_of_field (field);
3131	    count++;
3132	  }
3133	else
3134	  count += pushed;
3135      }
3136
3137  return count;
3138}
3139
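/* Make a constraint from VI to ANYTHING, i.e. VI = &ANYTHING, meaning that
   VI may point to anything.  This is used for variables, such as globals,
   whose points-to sets we cannot track precisely.  */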
3140static void
3141make_constraint_to_anything (varinfo_t vi)
3142{
3143  struct constraint_expr lhs, rhs;
3144
3145  lhs.var = vi->id;
3146  lhs.offset = 0;
3147  lhs.type = SCALAR;
3148
3149  rhs.var = anything_id;
  rhs.offset = 0;
3151  rhs.type = ADDRESSOF;
3152  process_constraint (new_constraint (lhs, rhs));
3153}
3154
3155
3156/* Return true if FIELDSTACK contains fields that overlap.
3157   FIELDSTACK is assumed to be sorted by offset.  */
3158
3159static bool
3160check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3161{
3162  fieldoff_s *fo = NULL;
3163  unsigned int i;
3164  HOST_WIDE_INT lastoffset = -1;
3165
3166  for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3167    {
3168      if (fo->offset == lastoffset)
3169	return true;
3170      lastoffset = fo->offset;
3171    }
3172  return false;
}

/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
3175   This will also create any varinfo structures necessary for fields
3176   of DECL.  */
3177
3178static unsigned int
3179create_variable_info_for (tree decl, const char *name)
3180{
3181  unsigned int index = VEC_length (varinfo_t, varmap);
3182  varinfo_t vi;
3183  tree decltype = TREE_TYPE (decl);
3184  bool notokay = false;
3185  bool hasunion;
3186  bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
3187  VEC (fieldoff_s,heap) *fieldstack = NULL;
3188
3189
3190  hasunion = TREE_CODE (decltype) == UNION_TYPE
3191             || TREE_CODE (decltype) == QUAL_UNION_TYPE;
3192  if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
3193    {
3194      push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
3195      if (hasunion)
3196	{
3197	  VEC_free (fieldoff_s, heap, fieldstack);
3198	  notokay = true;
3199	}
3200    }
3201
3202
3203  /* If the variable doesn't have subvars, we may end up needing to
3204     sort the field list and create fake variables for all the
3205     fields.  */
3206  vi = new_var_info (decl, index, name, index);
3207  vi->decl = decl;
3208  vi->offset = 0;
3209  vi->has_union = hasunion;
3210  if (!TYPE_SIZE (decltype)
3211      || TREE_CODE (TYPE_SIZE (decltype)) != INTEGER_CST
3212      || TREE_CODE (decltype) == ARRAY_TYPE
3213      || TREE_CODE (decltype) == UNION_TYPE
3214      || TREE_CODE (decltype) == QUAL_UNION_TYPE)
3215    {
3216      vi->is_unknown_size_var = true;
3217      vi->fullsize = ~0;
3218      vi->size = ~0;
3219    }
3220  else
3221    {
3222      vi->fullsize = TREE_INT_CST_LOW (TYPE_SIZE (decltype));
3223      vi->size = vi->fullsize;
3224    }
3225
3226  insert_id_for_tree (vi->decl, index);
3227  VEC_safe_push (varinfo_t, heap, varmap, vi);
3228  if (is_global)
3229    make_constraint_to_anything (vi);
3230
3231  stats.total_vars++;
3232  if (use_field_sensitive
3233      && !notokay
3234      && !vi->is_unknown_size_var
3235      && var_can_have_subvars (decl)
3236      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
3237    {
3238      unsigned int newindex = VEC_length (varinfo_t, varmap);
3239      fieldoff_s *fo = NULL;
3240      unsigned int i;
3241      tree field;
3242
3243      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3244	{
3245	  if (!DECL_SIZE (fo->field)
3246	      || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
3247	      || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE
3248	      || fo->offset < 0)
3249	    {
3250	      notokay = true;
3251	      break;
3252	    }
3253	}
3254
3255      /* We can't sort them if we have a field with a variable sized type,
3256	 which will make notokay = true.  In that case, we are going to return
3257	 without creating varinfos for the fields anyway, so sorting them is a
3258	 waste to boot.  */
3259      if (!notokay)
3260	{
3261	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     with what appear to be overlapping fields even though they,
3264	     in reality, do not overlap.  Until the C++ FE is fixed,
3265	     we will simply disable field-sensitivity for these cases.  */
3266	  notokay = check_for_overlaps (fieldstack);
3267	}
3268
3269
3270      if (VEC_length (fieldoff_s, fieldstack) != 0)
3271	fo = VEC_index (fieldoff_s, fieldstack, 0);
3272
3273      if (fo == NULL || notokay)
3274	{
3275	  vi->is_unknown_size_var = 1;
3276	  vi->fullsize = ~0;
3277	  vi->size = ~0;
3278	  VEC_free (fieldoff_s, heap, fieldstack);
3279	  return index;
3280	}
3281
3282      field = fo->field;
3283      vi->size = TREE_INT_CST_LOW (DECL_SIZE (field));
3284      vi->offset = fo->offset;
3285      for (i = 1; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3286	{
3287	  varinfo_t newvi;
3288	  const char *newname;
3289	  char *tempname;
3290
3291	  field = fo->field;
3292	  newindex = VEC_length (varinfo_t, varmap);
3293	  asprintf (&tempname, "%s.%s", vi->name, alias_get_name (field));
3294	  newname = ggc_strdup (tempname);
3295	  free (tempname);
3296	  newvi = new_var_info (decl, newindex, newname, newindex);
3297	  newvi->offset = fo->offset;
3298	  newvi->size = TREE_INT_CST_LOW (DECL_SIZE (field));
3299	  newvi->fullsize = vi->fullsize;
3300	  insert_into_field_list (vi, newvi);
3301	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
3302	  if (is_global)
3303	    make_constraint_to_anything (newvi);
3304
3305	  stats.total_vars++;
3306	}
3307      VEC_free (fieldoff_s, heap, fieldstack);
3308    }
3309  return index;
3310}
3311
3312/* Print out the points-to solution for VAR to FILE.  */
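/* The output has the form "x = { a b c }", listing the name of every
   variable in the points-to solution of VAR's representative node.  */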
3313
3314void
3315dump_solution_for_var (FILE *file, unsigned int var)
3316{
3317  varinfo_t vi = get_varinfo (var);
3318  unsigned int i;
3319  bitmap_iterator bi;
3320
3321  fprintf (file, "%s = { ", vi->name);
3322  EXECUTE_IF_SET_IN_BITMAP (get_varinfo (vi->node)->solution, 0, i, bi)
3323    {
3324      fprintf (file, "%s ", get_varinfo (i)->name);
3325    }
3326  fprintf (file, "}\n");
3327}
3328
3329/* Print the points-to solution for VAR to stdout.  */
3330
3331void
3332debug_solution_for_var (unsigned int var)
3333{
3334  dump_solution_for_var (stdout, var);
3335}
3336
3337
3338/* Create varinfo structures for all of the variables in the
3339   function for intraprocedural mode.  */
3340
3341static void
3342intra_create_variable_infos (void)
3343{
3344  tree t;
3345
  /* For each incoming argument ARG, add the constraint ARG = &ANYTHING.  */
3347  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
3348    {
3349      struct constraint_expr lhs;
3350      struct constraint_expr rhs;
3351      varinfo_t p;
3352
3353      lhs.offset = 0;
3354      lhs.type = SCALAR;
3355      lhs.var  = create_variable_info_for (t, alias_get_name (t));
3356
3357      rhs.var = anything_id;
3358      rhs.type = ADDRESSOF;
3359      rhs.offset = 0;
3360
3361      for (p = get_varinfo (lhs.var); p; p = p->next)
3362	{
3363	  struct constraint_expr temp = lhs;
3364	  temp.var = p->id;
3365	  process_constraint (new_constraint (temp, rhs));
3366	}
3367    }
3368
3369}
3370
3371/* Set bits in INTO corresponding to the variable uids in solution set
3372   FROM  */
3373
3374static void
3375set_uids_in_ptset (bitmap into, bitmap from)
3376{
3377  unsigned int i;
3378  bitmap_iterator bi;
3379  bool found_anyoffset = false;
3380  subvar_t sv;
3381
3382  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
3383    {
3384      varinfo_t vi = get_varinfo (i);
3385
3386      /* If we find ANYOFFSET in the solution and the solution
3387	 includes SFTs for some structure, then all the SFTs in that
3388	 structure will need to be added to the alias set.  */
3389      if (vi->id == anyoffset_id)
3390	{
3391	  found_anyoffset = true;
3392	  continue;
3393	}
3394
3395      /* The only artificial variables that are allowed in a may-alias
3396	 set are heap variables.  */
3397      if (vi->is_artificial_var && !vi->is_heap_var)
3398	continue;
3399
3400      if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
3401	{
3402	  /* Variables containing unions may need to be converted to
3403	     their SFT's, because SFT's can have unions and we cannot.  */
3404	  for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
3405	    bitmap_set_bit (into, DECL_UID (sv->var));
3406	}
3407      else if (TREE_CODE (vi->decl) == VAR_DECL
3408	       || TREE_CODE (vi->decl) == PARM_DECL
3409	       || TREE_CODE (vi->decl) == RESULT_DECL)
3410	{
3411	  if (found_anyoffset
3412	      && var_can_have_subvars (vi->decl)
3413	      && get_subvars_for_var (vi->decl))
3414	    {
3415	      /* If ANYOFFSET is in the solution set and VI->DECL is
3416		 an aggregate variable with sub-variables, then any of
3417		 the SFTs inside VI->DECL may have been accessed.  Add
3418		 all the sub-vars for VI->DECL.  */
3419	      for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
3420		bitmap_set_bit (into, DECL_UID (sv->var));
3421	    }
3422	  else if (var_can_have_subvars (vi->decl)
3423		   && get_subvars_for_var (vi->decl))
3424	    {
3425	      /* If VI->DECL is an aggregate for which we created
3426		 SFTs, add the SFT corresponding to VI->OFFSET.  */
3427	      tree sft = get_subvar_at (vi->decl, vi->offset);
3428	      bitmap_set_bit (into, DECL_UID (sft));
3429	    }
3430	  else
3431	    {
3432	      /* Otherwise, just add VI->DECL to the alias set.  */
3433	      bitmap_set_bit (into, DECL_UID (vi->decl));
3434	    }
3435	}
3436    }
3437}
3438
3439
3440static bool have_alias_info = false;
3441
3442/* Given a pointer variable P, fill in its points-to set, or return
3443   false if we can't.  */
3444
3445bool
3446find_what_p_points_to (tree p)
3447{
3448  unsigned int id = 0;
3449
3450  if (!have_alias_info)
3451    return false;
3452
3453  if (lookup_id_for_tree (p, &id))
3454    {
3455      varinfo_t vi = get_varinfo (id);
3456
3457      if (vi->is_artificial_var)
3458	return false;
3459
3460      /* See if this is a field or a structure.  */
3461      if (vi->size != vi->fullsize)
3462	{
3463	  /* Nothing currently asks about structure fields directly,
3464	     but when they do, we need code here to hand back the
3465	     points-to set.  */
3466	  if (!var_can_have_subvars (vi->decl)
3467	      || get_subvars_for_var (vi->decl) == NULL)
3468	    return false;
3469	}
3470      else
3471	{
3472	  struct ptr_info_def *pi = get_ptr_info (p);
3473	  unsigned int i;
3474	  bitmap_iterator bi;
3475
3476	  /* This variable may have been collapsed, let's get the real
3477	     variable.  */
3478	  vi = get_varinfo (vi->node);
3479
3480	  /* Translate artificial variables into SSA_NAME_PTR_INFO
3481	     attributes.  */
3482	  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
3483	    {
3484	      varinfo_t vi = get_varinfo (i);
3485
3486	      if (vi->is_artificial_var)
3487		{
3488		  /* FIXME.  READONLY should be handled better so that
3489		     flow insensitive aliasing can disregard writable
3490		     aliases.  */
3491		  if (vi->id == nothing_id)
3492		    pi->pt_null = 1;
3493		  else if (vi->id == anything_id)
3494		    pi->pt_anything = 1;
3495		  else if (vi->id == readonly_id)
3496		    pi->pt_anything = 1;
3497		  else if (vi->id == integer_id)
3498		    pi->pt_anything = 1;
3499		  else if (vi->is_heap_var)
3500		    pi->pt_global_mem = 1;
3501		}
3502	    }
3503
3504	  if (pi->pt_anything)
3505	    return false;
3506
3507	  if (!pi->pt_vars)
3508	    pi->pt_vars = BITMAP_GGC_ALLOC ();
3509
3510	  set_uids_in_ptset (pi->pt_vars, vi->solution);
3511
3512	  if (bitmap_empty_p (pi->pt_vars))
3513	    pi->pt_vars = NULL;
3514
3515	  return true;
3516	}
3517    }
3518
3519  return false;
3520}
3521
3522
3523/* Initialize things necessary to perform PTA */
3524
3525static void
3526init_alias_vars (void)
3527{
3528  bitmap_obstack_initialize (&ptabitmap_obstack);
3529}
3530
3531
3532/* Dump points-to information to OUTFILE.  */
3533
3534void
3535dump_sa_points_to_info (FILE *outfile)
3536{
3537  unsigned int i;
3538
3539  fprintf (outfile, "\nPoints-to sets\n\n");
3540
3541  if (dump_flags & TDF_STATS)
3542    {
3543      fprintf (outfile, "Stats:\n");
3544      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
3545      fprintf (outfile, "Statically unified vars:  %d\n",
3546	       stats.unified_vars_static);
3547      fprintf (outfile, "Collapsed vars:           %d\n", stats.collapsed_vars);
3548      fprintf (outfile, "Dynamically unified vars: %d\n",
3549	       stats.unified_vars_dynamic);
3550      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
3551    }
3552
3553  for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
3554    dump_solution_for_var (outfile, i);
3555}
3556
3557
3558/* Debug points-to information to stderr.  */
3559
3560void
3561debug_sa_points_to_info (void)
3562{
3563  dump_sa_points_to_info (stderr);
3564}
3565
3566
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, INTEGER, and ANYOFFSET.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
  var_nothing = new_var_info (nothing_tree, 0, "NULL", 0);
  insert_id_for_tree (nothing_tree, 0);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  nothing_id = 0;
  VEC_safe_push (varinfo_t, heap, varmap, var_nothing);

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
  var_anything = new_var_info (anything_tree, 1, "ANYTHING", 1);
  insert_id_for_tree (anything_tree, 1);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;
  anything_id = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked lists and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
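  /* In the constraint notation used elsewhere in this file, the
     statements below build the single constraint ANYTHING = &ANYTHING
     and push it onto the constraint vector.  */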
  VEC_safe_push (varinfo_t, heap, varmap, var_anything);
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  var_anything->address_taken = true;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
  var_readonly = new_var_info (readonly_tree, 2, "READONLY", 2);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;
  insert_id_for_tree (readonly_tree, 2);
  readonly_id = 2;
  VEC_safe_push (varinfo_t, heap, varmap, var_readonly);

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
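  /* In the same notation, the constraint processed below is
     READONLY = &ANYTHING.  */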
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));

  /* Create the INTEGER variable, used to represent that a variable points
     to an INTEGER.  */
  integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
  var_integer = new_var_info (integer_tree, 3, "INTEGER", 3);
  insert_id_for_tree (integer_tree, 3);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;
  integer_id = 3;
  VEC_safe_push (varinfo_t, heap, varmap, var_integer);

  /* *INTEGER = ANYTHING, because we don't know where a dereference of a random
     integer will point to.  */
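  /* In constraint form, the code below adds INTEGER = &ANYTHING.  */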
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ANYOFFSET variable, used to represent an arbitrary offset
     inside an object.  This is similar to ANYTHING, but less drastic.
     It means that the pointer can point anywhere inside an object,
     but not outside of it.  */
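  /* An illustrative (not exhaustive) example: a pointer computed by
     adding a non-constant offset to the address of an object may point
     to any field of that object, which is what ANYOFFSET expresses.  */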
  anyoffset_tree = create_tmp_var_raw (void_type_node, "ANYOFFSET");
  anyoffset_id = 4;
  var_anyoffset = new_var_info (anyoffset_tree, anyoffset_id, "ANYOFFSET",
                                anyoffset_id);
  insert_id_for_tree (anyoffset_tree, anyoffset_id);
  var_anyoffset->is_artificial_var = 1;
  var_anyoffset->size = ~0;
  var_anyoffset->offset = 0;
  var_anyoffset->next = NULL;
  var_anyoffset->fullsize = ~0;
  var_anyoffset->is_special_var = 1;
  VEC_safe_push (varinfo_t, heap, varmap, var_anyoffset);

  /* ANYOFFSET points to ANYOFFSET.  */
  lhs.type = SCALAR;
  lhs.var = anyoffset_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anyoffset_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}

/* Return true if we actually need to solve the constraint graph in order to
   get our points-to sets.  This is false when, for example, no addresses are
   taken other than special vars, or all points-to sets with members already
   contain the anything variable and there are no predecessors for other
   sets.  */
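/* An illustrative case: a function that never takes the address of a
   local and whose non-empty points-to solutions already contain
   ANYTHING gains nothing from running the solver, so we skip it.  */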

static bool
need_to_solve (void)
{
  int i;
  varinfo_t v;
  bool found_address_taken = false;
  bool found_non_anything = false;

  for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
    {
      if (v->is_special_var)
	continue;

      if (v->address_taken)
	found_address_taken = true;

      if (v->solution
	  && !bitmap_empty_p (v->solution)
	  && !bitmap_bit_p (v->solution, anything_id))
	found_non_anything = true;
      else if (bitmap_empty_p (v->solution)
	       && VEC_length (constraint_edge_t, graph->preds[v->id]) != 0)
	found_non_anything = true;

      if (found_address_taken && found_non_anything)
	return true;
    }

  return false;
}

/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */
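/* In outline: initialize the obstack, allocation pools, constraint
   vector and variable map; create the special variables with
   init_base_vars; call intra_create_variable_infos; generate
   constraints from every PHI node and statement; build the constraint
   graph; and, when need_to_solve says it is worthwhile, collapse
   cycles, perform variable substitution and solve the graph.  */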

void
compute_points_to_sets (struct alias_info *ai)
{
  basic_block bb;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraint_edge_pool = create_alloc_pool ("Constraint edges",
					    sizeof (struct constraint_edge), 30);

  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  id_for_tree = htab_create (10, tree_id_hash, tree_id_eq, free);
  memset (&stats, 0, sizeof (stats));

  init_base_vars ();

  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
	if (is_gimple_reg (PHI_RESULT (phi)))
	  find_func_aliases (phi, ai);

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	find_func_aliases (bsi_stmt (bsi), ai);
    }

  build_constraint_graph ();

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (need_to_solve ())
    {
      if (dump_file)
	fprintf (dump_file, "\nCollapsing static cycles and doing variable "
		 "substitution:\n");

      find_and_collapse_graph_cycles (graph, false);
      perform_var_substitution (graph);

      if (dump_file)
	fprintf (dump_file, "\nSolving graph:\n");

      solve_graph (graph);
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  have_alias_info = true;

  timevar_pop (TV_TREE_PTA);
}


/* Delete created points-to sets.  */

void
delete_points_to_sets (void)
{
  varinfo_t v;
  int i;

  htab_delete (id_for_tree);
  bitmap_obstack_release (&ptabitmap_obstack);
  VEC_free (constraint_t, heap, constraints);

  for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
    {
      VEC_free (constraint_edge_t, heap, graph->succs[i]);
      VEC_free (constraint_edge_t, heap, graph->preds[i]);
      VEC_free (constraint_t, heap, v->complex);
    }
  free (graph->succs);
  free (graph->preds);
  free (graph);

  VEC_free (varinfo_t, heap, varmap);
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);
  free_alloc_pool (constraint_edge_pool);

  have_alias_info = false;
}

/* Initialize the heapvar for statement mapping.  */
void
init_alias_heapvars (void)
{
  heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq, NULL);
}

void
delete_alias_heapvars (void)
{
  htab_delete (heapvar_for_stmt);
}


#include "gt-tree-ssa-structalias.h"
