1/* Type based alias analysis.
2   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010
3   Free Software Foundation, Inc.
4   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22/* This pass determines which types in the program contain only
23   instances that are completely encapsulated by the compilation unit.
24   Those types that are encapsulated must also pass the further
25   requirement that there be no bad operations on any instances of
26   those types.
27
28   A great deal of freedom in compilation is allowed for the instances
29   of those types that pass these conditions.
30*/
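
/* For illustration only, a hypothetical example of the distinction this
   pass makes (the declarations below are not from any real test case):

       static struct contained { int a; int b; } local_instance;
       struct leaked { int c; };
       extern void external_fn (struct leaked *p);

       static void
       use (struct leaked *l)
       {
         local_instance.a = 1;
         external_fn (l);
       }

   Every instance of "struct contained" is visible only to this unit and
   nothing bad is done to it, so the compiler may treat it freely, while
   "struct leaked" has a pointer to one of its instances passed to an
   externally visible function and therefore escapes.  */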
31
32/* The code in this module is called by the ipa pass manager. It
33   should be one of the later passes since its information is used by
34   the rest of the compilation. */
35
36#include "config.h"
37#include "system.h"
38#include "coretypes.h"
39#include "tm.h"
40#include "tree.h"
41#include "tree-flow.h"
42#include "tree-inline.h"
43#include "tree-pass.h"
44#include "langhooks.h"
45#include "pointer-set.h"
46#include "splay-tree.h"
47#include "ggc.h"
48#include "ipa-utils.h"
49#include "ipa-type-escape.h"
50#include "gimple.h"
51#include "cgraph.h"
52#include "output.h"
53#include "flags.h"
54#include "timevar.h"
55#include "diagnostic.h"
56#include "langhooks.h"
57
58/* Some of the aliasing code is called very early, before this phase
59   runs.  To ensure that this is not a problem, we keep track of
60   whether this phase has been run.  */
61static bool initialized = false;
62
63/* Scratch bitmap for avoiding work. */
64static bitmap been_there_done_that;
65static bitmap bitmap_tmp;
66
67/* There are two levels of escape that types can undergo.
68
69   EXPOSED_PARAMETER - some instance of the variable is
70   passed by value into an externally visible function or some
71   instance of the variable is passed out of an externally visible
72   function as a return value.  In this case any of the fields of the
73   variable that are pointer types end up having their types marked as
74   FULL_ESCAPE.
75
76   FULL_ESCAPE - when bad things happen to good types. One of the
77   following things happens to the type: (a) either an instance of the
78   variable has its address passed to an externally visible function,
79   (b) the address is taken and some bad cast happens to the address
80   or (c) explicit arithmetic is done to the address.
81*/
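
/* A sketch of the difference between the two levels, using hypothetical
   declarations that are not part of the sources:

       struct t { int n; };
       struct s { struct t *p; int n; };
       extern void ext_by_value (struct s v);
       extern void ext_by_ref (struct s *v);

       void f (struct s x) { ext_by_value (x); }
       void g (struct s x) { ext_by_ref (&x); }

   Passing X by value in f gives "struct s" the EXPOSED_PARAMETER level,
   and its pointer field P causes "struct t" to become FULL_ESCAPE.
   Passing the address of X in g makes "struct s" itself FULL_ESCAPE.  */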
82
83enum escape_t
84{
85  EXPOSED_PARAMETER,
86  FULL_ESCAPE
87};
88
89/* The following two bit vectors, global_types_*, correspond to the
90   two escape levels described above.  During the analysis phase, a bit
91   is set in one of these vectors if an operation of the offending
92   class is discovered to happen on the associated type.  */
93
94static bitmap global_types_exposed_parameter;
95static bitmap global_types_full_escape;
96
97/* All of the types seen in this compilation unit. */
98static bitmap global_types_seen;
99/* Reverse map to take a canon uid and map it to a canon type.  Uid's
100   are never manipulated unless they are associated with a canon
101   type.  */
102static splay_tree uid_to_canon_type;
103
104/* Internal structure of type mapping code.  This maps a canon type
105   name to its canon type.  */
106static splay_tree all_canon_types;
107
108/* Map from type clones to the single canon type.  */
109static splay_tree type_to_canon_type;
110
111/* A splay tree of bitmaps.  An element X in the splay tree has a bit
112   set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (Y)) if there was
113   an operation in the program of the form "&X.Y".  */
114static splay_tree uid_to_addressof_down_map;
115
116/* A splay tree of bitmaps.  An element Y in the splay tree has a bit
117   set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (X)) if there was
118   an operation in the program of the form "&X.Y".  */
119static splay_tree uid_to_addressof_up_map;
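
/* A hypothetical illustration of the two maps above.  Given

       struct y { int i; };
       struct x { struct y f; };
       struct y *get (struct x *p) { return &p->f; }

   the operation "&p->f" has the form "&X.Y" with X == struct x and
   Y == struct y; it sets the bit for struct y in the down map entry of
   struct x and the bit for struct x in the up map entry of struct y.  */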
120
121/* Tree to hold the subtype maps used to mark subtypes of escaped
122   types.  */
123static splay_tree uid_to_subtype_map;
124
125/* Records tree nodes seen in cgraph_create_edges.  Simply using
126   walk_tree_without_duplicates doesn't guarantee each node is visited
127   once because it gets a new htab upon each recursive call from
128   scan_for_refs.  */
129static struct pointer_set_t *visited_nodes;
130
131/* Stmts visited by the walk_use_def_chains function, which is called
132   recursively.  */
133static struct pointer_set_t *visited_stmts;
134
135static bitmap_obstack ipa_obstack;
136
137/* Static functions from this file that are used
138   before being defined.  */
139static unsigned int look_for_casts (tree);
140static bool is_cast_from_non_pointer (tree, gimple, void *);
141
142/* Get the name of TYPE or return the string "<UNNAMED>".  */
143static const char*
144get_name_of_type (tree type)
145{
146  tree name = TYPE_NAME (type);
147
148  if (!name)
149    /* Unnamed type, do what you like here.  */
150    return "<UNNAMED>";
151
152  /* It will be a TYPE_DECL in the case of a typedef, otherwise, an
153     identifier_node */
154  if (TREE_CODE (name) == TYPE_DECL)
155    {
156      /*  Each DECL has a DECL_NAME field which contains an
157	  IDENTIFIER_NODE.  (Some decls, most often labels, may have
158	  zero as the DECL_NAME).  */
159      if (DECL_NAME (name))
160	return IDENTIFIER_POINTER (DECL_NAME (name));
161      else
162	/* Unnamed type, do what you like here.  */
163	return "<UNNAMED>";
164    }
165  else if (TREE_CODE (name) == IDENTIFIER_NODE)
166    return IDENTIFIER_POINTER (name);
167  else
168    return "<UNNAMED>";
169}
170
171struct type_brand_s
172{
173  const char* name;
174  int seq;
175};
176
177/* Splay tree comparison function on type_brand_s structures.  */
178
179static int
180compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
181{
182  struct type_brand_s * k1 = (struct type_brand_s *) sk1;
183  struct type_brand_s * k2 = (struct type_brand_s *) sk2;
184
185  int value = strcmp(k1->name, k2->name);
186  if (value == 0)
187    return k2->seq - k1->seq;
188  else
189    return value;
190}
191
192/* All of the "unique_type" code is a hack to get around the sleazy
193   implementation used to compile more than one file.  Currently gcc
194   does not get rid of multiple instances of the same type that have
195   been collected from different compilation units.  */
196/* This is a trivial algorithm for removing duplicate types.  This
197   would not work for any language that used structural equivalence as
198   the basis of its type system.  */
199/* Return TYPE if no type compatible with TYPE has been seen so far,
200   otherwise return a type compatible with TYPE that has already been
201   processed.  */
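
/* A sketch of the intended behavior, using hypothetical input.  If two
   compilation units each provide a compatible

       struct foo { int a; };

   the later occurrence should be found under the brand for the name
   "foo", recognized as compatible by types_compatible_p, and recorded
   in type_to_canon_type as an alias of the first canon type.  An
   incompatible type that happens to share the name "foo" gets a
   distinct seq number under the same name.  */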
202
203static tree
204discover_unique_type (tree type)
205{
206  struct type_brand_s * brand = XNEW (struct type_brand_s);
207  int i = 0;
208  splay_tree_node result;
209
210  brand->name = get_name_of_type (type);
211
212  while (1)
213    {
214      brand->seq = i++;
215      result = splay_tree_lookup (all_canon_types, (splay_tree_key) brand);
216
217      if (result)
218	{
219	  /* Create an alias since this is just the same as
220	     other_type.  */
221	  tree other_type = (tree) result->value;
222	  if (types_compatible_p (type, other_type))
223	    {
224	      free (brand);
225	      /* Insert this new type as an alias for other_type.  */
226	      splay_tree_insert (type_to_canon_type,
227				 (splay_tree_key) type,
228				 (splay_tree_value) other_type);
229	      return other_type;
230	    }
231	  /* Not compatible, look for next instance with same name.  */
232	}
233      else
234	{
235	  /* No more instances, create new one since this is the first
236	     time we saw this type.  */
237	  brand->seq = i++;
238	  /* Insert the new brand.  */
239	  splay_tree_insert (all_canon_types,
240			     (splay_tree_key) brand,
241			     (splay_tree_value) type);
242
243	  /* Insert this new type as an alias for itself.  */
244	  splay_tree_insert (type_to_canon_type,
245			     (splay_tree_key) type,
246			     (splay_tree_value) type);
247
248	  /* Insert the uid for reverse lookup; */
249	  splay_tree_insert (uid_to_canon_type,
250			     (splay_tree_key) TYPE_UID (type),
251			     (splay_tree_value) type);
252
253	  bitmap_set_bit (global_types_seen, TYPE_UID (type));
254	  return type;
255	}
256    }
257}
258
259/* Return true if TYPE is one of the type classes that we are willing
260   to analyze.  This skips the goofy types like arrays of pointers to
261   methods. */
262static bool
263type_to_consider (tree type)
264{
265  /* Strip the *'s off.  */
266  type = TYPE_MAIN_VARIANT (type);
267  while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
268    type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
269
270  switch (TREE_CODE (type))
271    {
272    case BOOLEAN_TYPE:
273    case COMPLEX_TYPE:
274    case ENUMERAL_TYPE:
275    case INTEGER_TYPE:
276    case QUAL_UNION_TYPE:
277    case REAL_TYPE:
278    case FIXED_POINT_TYPE:
279    case RECORD_TYPE:
280    case UNION_TYPE:
281    case VECTOR_TYPE:
282    case VOID_TYPE:
283      return true;
284
285    default:
286      return false;
287    }
288}
289
290/* Get the canon type of TYPE.  If SEE_THRU_PTRS is true, remove all
291   the POINTER_TOs and if SEE_THRU_ARRAYS is true, remove all of the
292   ARRAY_OFs and POINTER_TOs.  */
293
294static tree
295get_canon_type (tree type, bool see_thru_ptrs, bool see_thru_arrays)
296{
297  splay_tree_node result;
298  /* Strip the *'s off.  */
299  if (!type || !type_to_consider (type))
300    return NULL;
301
302  type = TYPE_MAIN_VARIANT (type);
303  if (see_thru_arrays)
304    while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
305      type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
306
307  else if (see_thru_ptrs)
308    while (POINTER_TYPE_P (type))
309	type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
310
311  result = splay_tree_lookup (type_to_canon_type, (splay_tree_key) type);
312
313  if (result == NULL)
314    return discover_unique_type (type);
315  else return (tree) result->value;
316}
317
318/* Same as get_canon_type, except return the TYPE_UID rather than the
319   TYPE.  */
320
321static int
322get_canon_type_uid (tree type, bool see_thru_ptrs, bool see_thru_arrays)
323{
324  type = get_canon_type (type, see_thru_ptrs, see_thru_arrays);
325  if (type)
326    return TYPE_UID(type);
327  else return 0;
328}
329
330/* Return 0 if TYPE is a record or union type.  Return a positive
331   number if TYPE is a pointer to a record or union.  The number is
332   the number of pointer types stripped to get to the record or union
333   type.  Return -1 if TYPE is none of the above.  */
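
/* For example (illustrative only): for a type "struct s", this function
   returns 0 for "struct s", 1 for "struct s *", 2 for "struct s **",
   and -1 for "int" or for "struct s *[10]" (arrays are not stripped by
   this variant; see the _or_array_type variant below).  */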
334
335int
336ipa_type_escape_star_count_of_interesting_type (tree type)
337{
338  int count = 0;
339  /* Strip the *'s off.  */
340  if (!type)
341    return -1;
342  type = TYPE_MAIN_VARIANT (type);
343  while (POINTER_TYPE_P (type))
344    {
345      type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
346      count++;
347    }
348
349  /* We are interested in records and unions only.  */
350  if (TREE_CODE (type) == RECORD_TYPE
351      || TREE_CODE (type) == QUAL_UNION_TYPE
352      || TREE_CODE (type) == UNION_TYPE)
353    return count;
354  else
355    return -1;
356}
357
358
359/* Return 0 if TYPE is a record or union type.  Return a positive
360   number if TYPE is a pointer to, or an array of, a record or union.
361   The number is the number of pointer and array types stripped to get
362   to the record or union type.  Return -1 if TYPE is none of the above.  */
363
364int
365ipa_type_escape_star_count_of_interesting_or_array_type (tree type)
366{
367  int count = 0;
368  /* Strip the *'s off.  */
369  if (!type)
370    return -1;
371  type = TYPE_MAIN_VARIANT (type);
372  while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
373    {
374      type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
375      count++;
376    }
377
378  /* We are interested in records and unions only.  */
379  if (TREE_CODE (type) == RECORD_TYPE
380      || TREE_CODE (type) == QUAL_UNION_TYPE
381      || TREE_CODE (type) == UNION_TYPE)
382    return count;
383  else
384    return -1;
385}
386
387
388/* Return true if the record or union TYPE passed in is contained
389   within (does not escape) this compilation unit.  Note that all of the
390   pointer-to's are removed before testing since these may not be correct.  */
391
392bool
393ipa_type_escape_type_contained_p (tree type)
394{
395  if (!initialized)
396    return false;
397  return !bitmap_bit_p (global_types_full_escape,
398			get_canon_type_uid (type, true, false));
399}
400
401/* Return true if a modification to a field of type FIELD_TYPE cannot
402   clobber a record of RECORD_TYPE.  */
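
/* A hypothetical example of the query: with

       struct r { int a; float f; };
       void set (float *p) { *p = 0.0f; }

   a store through a "float *" cannot clobber a "struct r" provided that
   "struct r" is contained in this unit and no expression of the form
   "&x.f" (for some x of type struct r and float field f) was recorded
   in r's addressof down map.  */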
403
404bool
405ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type)
406{
407  splay_tree_node result;
408  int uid;
409
410  if (!initialized)
411    return false;
412
413  /* Strip off all of the pointer tos on the record type.  Strip the
414     same number of pointer tos from the field type.  If the field
415     type has fewer, it could not have been aliased. */
416  record_type = TYPE_MAIN_VARIANT (record_type);
417  field_type = TYPE_MAIN_VARIANT (field_type);
418  while (POINTER_TYPE_P (record_type))
419    {
420      record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
421      if (POINTER_TYPE_P (field_type))
422	field_type = TYPE_MAIN_VARIANT (TREE_TYPE (field_type));
423      else
424	/* However, if field_type is a union, this quick test is not
425	   correct since one of the variants of the union may be a
426	   pointer to type and we cannot see across that here.  So we
427	   just strip the remaining pointer tos off the record type
428	   and fall thru to the more precise code.  */
429	if (TREE_CODE (field_type) == QUAL_UNION_TYPE
430	    || TREE_CODE (field_type) == UNION_TYPE)
431	  {
432	    while (POINTER_TYPE_P (record_type))
433	      record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
434	    break;
435	  }
436	else
437	  return true;
438    }
439
440  record_type = get_canon_type (record_type, true, true);
441  /* The record type must be contained.  The field type may
442     escape.  */
443  if (!ipa_type_escape_type_contained_p (record_type))
444    return false;
445
446  uid = TYPE_UID (record_type);
447  result = splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
448
449  if (result)
450    {
451      bitmap field_type_map = (bitmap) result->value;
452      uid = get_canon_type_uid (field_type, true, true);
453      /* If the bit is there, the address was taken. If not, it
454	 wasn't.  */
455      return !bitmap_bit_p (field_type_map, uid);
456    }
457  else
458    /* No bitmap means no addresses were taken.  */
459    return true;
460}
461
462
463/* Add TYPE to the suspect type set for ESCAPE_STATUS.  Return the
464   canonical type, or NULL if TYPE is not one we analyze.  */
465
466static tree
467mark_type (tree type, enum escape_t escape_status)
468{
469  bitmap map = NULL;
470  int uid;
471
472  type = get_canon_type (type, true, true);
473  if (!type)
474    return NULL;
475
476  switch (escape_status)
477    {
478    case EXPOSED_PARAMETER:
479      map = global_types_exposed_parameter;
480      break;
481    case FULL_ESCAPE:
482      map = global_types_full_escape;
483      break;
484    }
485
486  uid = TYPE_UID (type);
487  if (bitmap_bit_p (map, uid))
488    return type;
489  else
490    {
491      bitmap_set_bit (map, uid);
492      if (escape_status == FULL_ESCAPE)
493	{
494	  /* Efficiency hack. When things are bad, do not mess around
495	     with this type anymore.  */
496	  bitmap_set_bit (global_types_exposed_parameter, uid);
497	}
498    }
499  return type;
500}
501
502/* Add interesting TYPE to the suspect type set. If the set is
503   EXPOSED_PARAMETER and the TYPE is a pointer type, the set is
504   changed to FULL_ESCAPE.  */
505
506static void
507mark_interesting_type (tree type, enum escape_t escape_status)
508{
509  if (!type) return;
510  if (ipa_type_escape_star_count_of_interesting_type (type) >= 0)
511    {
512      if ((escape_status == EXPOSED_PARAMETER)
513	  && POINTER_TYPE_P (type))
514	/* EXPOSED_PARAMETERs are only structs or unions that are passed
515	   by value.  Anything passed by reference to an external
516	   function fully exposes the type.  */
517	mark_type (type, FULL_ESCAPE);
518      else
519	mark_type (type, escape_status);
520    }
521}
522
523/* Return true if PARENT is a supertype of CHILD.  Both types must be
524   known to be structures or unions.  */
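
/* A sketch of what counts as a parent here, with hypothetical types:

       struct base { int a; };
       struct derived { struct base b; int c; };
       union u { struct base b; struct derived d; };

   parent_type_p (derived, base) and parent_type_p (u, base) hold; for a
   RECORD_TYPE the search essentially only follows the leading field,
   matching the rule below that a cast may only target the first
   field.  */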
525
526static bool
527parent_type_p (tree parent, tree child)
528{
529  int i;
530  tree binfo, base_binfo;
531  if (TYPE_BINFO (parent))
532    for (binfo = TYPE_BINFO (parent), i = 0;
533	 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
534      {
535	tree binfotype = BINFO_TYPE (base_binfo);
536	if (binfotype == child)
537	  return true;
538	else if (parent_type_p (binfotype, child))
539	  return true;
540      }
541  if (TREE_CODE (parent) == UNION_TYPE
542      || TREE_CODE (parent) == QUAL_UNION_TYPE)
543    {
544      tree field;
545      /* Search all of the variants in the union to see if one of them
546	 is the child.  */
547      for (field = TYPE_FIELDS (parent);
548	   field;
549	   field = TREE_CHAIN (field))
550	{
551	  tree field_type;
552	  if (TREE_CODE (field) != FIELD_DECL)
553	    continue;
554
555	  field_type = TREE_TYPE (field);
556	  if (field_type == child)
557	    return true;
558	}
559
560      /* If we did not find it, recursively ask the variants if one of
561	 their children is the child type.  */
562      for (field = TYPE_FIELDS (parent);
563	   field;
564	   field = TREE_CHAIN (field))
565	{
566	  tree field_type;
567	  if (TREE_CODE (field) != FIELD_DECL)
568	    continue;
569
570	  field_type = TREE_TYPE (field);
571	  if (TREE_CODE (field_type) == RECORD_TYPE
572	      || TREE_CODE (field_type) == QUAL_UNION_TYPE
573	      || TREE_CODE (field_type) == UNION_TYPE)
574	    if (parent_type_p (field_type, child))
575	      return true;
576	}
577    }
578
579  if (TREE_CODE (parent) == RECORD_TYPE)
580    {
581      tree field;
582      for (field = TYPE_FIELDS (parent);
583	   field;
584	   field = TREE_CHAIN (field))
585	{
586	  tree field_type;
587	  if (TREE_CODE (field) != FIELD_DECL)
588	    continue;
589
590	  field_type = TREE_TYPE (field);
591	  if (field_type == child)
592	    return true;
593	  /* You can only cast to the first field so if it does not
594	     match, quit.  */
595	  if (TREE_CODE (field_type) == RECORD_TYPE
596	      || TREE_CODE (field_type) == QUAL_UNION_TYPE
597	      || TREE_CODE (field_type) == UNION_TYPE)
598	    {
599	      if (parent_type_p (field_type, child))
600		return true;
601	      else
602		break;
603	    }
604	}
605    }
606  return false;
607}
608
609/* Return the number of pointer tos for TYPE and return TYPE with all
610   of these stripped off.  */
611
612static int
613count_stars (tree* type_ptr)
614{
615  tree type = *type_ptr;
616  int i = 0;
617  type = TYPE_MAIN_VARIANT (type);
618  while (POINTER_TYPE_P (type))
619    {
620      type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
621      i++;
622    }
623
624  *type_ptr = type;
625  return i;
626}
627
628enum cast_type {
629  CT_UP = 0x1,
630  CT_DOWN = 0x2,
631  CT_SIDEWAYS = 0x4,
632  CT_USELESS = 0x8,
633  CT_FROM_P_BAD = 0x10,
634  CT_FROM_NON_P = 0x20,
635  CT_TO_NON_INTER = 0x40,
636  CT_FROM_MALLOC = 0x80,
637  CT_NO_CAST = 0x100
638};
639
640/* Check the cast FROM_TYPE to TO_TYPE.  This function requires that
641   the two types have already passed the
642   ipa_type_escape_star_count_of_interesting_type test.  */
643
644static enum cast_type
645check_cast_type (tree to_type, tree from_type)
646{
647  int to_stars = count_stars (&to_type);
648  int from_stars = count_stars (&from_type);
649  if (to_stars != from_stars)
650    return CT_SIDEWAYS;
651
652  if (to_type == from_type)
653    return CT_USELESS;
654
655  if (parent_type_p (to_type, from_type)) return CT_UP;
656  if (parent_type_p (from_type, to_type)) return CT_DOWN;
657  return CT_SIDEWAYS;
658}
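
/* As an illustration (hypothetical types): with

       struct a { int i; };
       struct b { float f; };

   casting "struct a *" to "struct a *" is CT_USELESS, casting
   "struct a *" to "struct b *" is CT_SIDEWAYS (neither type embeds the
   other), and casting "struct a *" to "struct a **" is also CT_SIDEWAYS
   because the number of pointer levels differs.  */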
659
660/* Return true if VAR is the result of a call to a function with the
661   ECF_MALLOC flag (such as malloc).  */
662
663static bool
664is_malloc_result (tree var)
665{
666  gimple def_stmt;
667
668  if (!var)
669    return false;
670
671  if (SSA_NAME_IS_DEFAULT_DEF (var))
672    return false;
673
674  def_stmt = SSA_NAME_DEF_STMT (var);
675
676  if (!is_gimple_call (def_stmt))
677    return false;
678
679  if (var != gimple_call_lhs (def_stmt))
680    return false;
681
682  return ((gimple_call_flags (def_stmt) & ECF_MALLOC) != 0);
683
684}
685
686/* Check a cast from the variable FROM to TO_TYPE.  Mark the escaping
687   types if appropriate.  Return the cast_type that was detected.  */
688
689static enum cast_type
690check_cast (tree to_type, tree from)
691{
692  tree from_type = get_canon_type (TREE_TYPE (from), false, false);
693  bool to_interesting_type, from_interesting_type;
694  enum cast_type cast = CT_NO_CAST;
695
696  to_type = get_canon_type (to_type, false, false);
697  if (!from_type || !to_type || from_type == to_type)
698    return cast;
699
700  to_interesting_type =
701    ipa_type_escape_star_count_of_interesting_type (to_type) >= 0;
702  from_interesting_type =
703    ipa_type_escape_star_count_of_interesting_type (from_type) >= 0;
704
705  if (to_interesting_type)
706    if (from_interesting_type)
707      {
708	/* Both types are interesting. This can be one of four types
709	   of cast: useless, up, down, or sideways.  We do not care
710	   about up or useless.  Sideways casts are always bad and
711	   both sides get marked as escaping.  Downcasts are not
712	   interesting here because if type is marked as escaping, all
713	   of its subtypes escape.  */
714	cast = check_cast_type (to_type, from_type);
715	switch (cast)
716	  {
717	  case CT_UP:
718	  case CT_USELESS:
719	  case CT_DOWN:
720	    break;
721
722	  case CT_SIDEWAYS:
723	    mark_type (to_type, FULL_ESCAPE);
724	    mark_type (from_type, FULL_ESCAPE);
725	    break;
726
727	  default:
728	    break;
729	  }
730      }
731    else
732      {
733	/* This code excludes two cases from being marked as escaped:
734
735	   1. a cast of an index into an array of structures/unions that
736	   happens just before accessing an array element; we should
737	   not mark it as escaped.
738	   2. a cast from a local that is the result of a call to
739	   malloc; do not mark the cast as bad.
740
741	*/
742
743	if (POINTER_TYPE_P (to_type) && !POINTER_TYPE_P (from_type))
744	  cast = CT_FROM_NON_P;
745	else if (TREE_CODE (from) == SSA_NAME
746		 && is_malloc_result (from))
747	  cast = CT_FROM_MALLOC;
748	else
749	  {
750	    cast = CT_FROM_P_BAD;
751	    mark_type (to_type, FULL_ESCAPE);
752	  }
753      }
754  else if (from_interesting_type)
755    {
756      mark_type (from_type, FULL_ESCAPE);
757      cast = CT_TO_NON_INTER;
758    }
759
760  return cast;
761}
762
763
764/* Scan assignment statement S to see if there are any casts within it.  */
765
766static unsigned int
767look_for_casts_stmt (gimple s)
768{
769  unsigned int cast = 0;
770
771  gcc_assert (is_gimple_assign (s));
772
773  if (gimple_assign_cast_p (s))
774    {
775      tree castfromvar = gimple_assign_rhs1 (s);
776      cast |= check_cast (TREE_TYPE (gimple_assign_lhs (s)), castfromvar);
777    }
778  else
779    {
780      size_t i;
781      for (i = 0; i < gimple_num_ops (s); i++)
782	cast |= look_for_casts (gimple_op (s, i));
783    }
784
785  if (!cast)
786    cast = CT_NO_CAST;
787
788  return cast;
789}
790
791
792typedef struct cast
793{
794  int type;
795  gimple stmt;
796} cast_t;
797
798/* This function is a callback for walk_use_def_chains function called
799   from is_array_access_through_pointer_and_index.  */
800
801static bool
802is_cast_from_non_pointer (tree var, gimple def_stmt, void *data)
803{
804  if (!def_stmt || !var)
805    return false;
806
807  if (gimple_code (def_stmt) == GIMPLE_PHI)
808    return false;
809
810  if (SSA_NAME_IS_DEFAULT_DEF (var))
811      return false;
812
813  if (is_gimple_assign (def_stmt))
814    {
815      use_operand_p use_p;
816      ssa_op_iter iter;
817      unsigned int cast = look_for_casts_stmt (def_stmt);
818
819      /* Check that only one cast happened, and it's of non-pointer
820	 type.  */
821      if ((cast & CT_FROM_NON_P) == (CT_FROM_NON_P)
822	  && (cast & ~(CT_FROM_NON_P)) == 0)
823	{
824	  ((cast_t *)data)->stmt = def_stmt;
825	  ((cast_t *)data)->type++;
826
827	  FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
828	    {
829	      walk_use_def_chains (USE_FROM_PTR (use_p),
830				   is_cast_from_non_pointer, data, false);
831	      if (((cast_t*)data)->type == -1)
832		break;
833	    }
834	}
835      /* Check that there is no cast, or cast is not harmful. */
836      else if ((cast & CT_NO_CAST) == (CT_NO_CAST)
837	  || (cast & CT_DOWN) == (CT_DOWN)
838	  || (cast & CT_UP) == (CT_UP)
839	  || (cast & CT_USELESS) == (CT_USELESS)
840	  || (cast & CT_FROM_MALLOC) == (CT_FROM_MALLOC))
841	{
842	  FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
843	    {
844	      walk_use_def_chains (USE_FROM_PTR (use_p),
845				   is_cast_from_non_pointer, data, false);
846	      if (((cast_t*)data)->type == -1)
847		break;
848	    }
849	}
850	/* The cast is harmful.  */
851	else
852	  ((cast_t *)data)->type = -1;
853    }
854
855  if (((cast_t*)data)->type == -1)
856    return true;
857
858  return false;
859}
860
861/* When array element a_p[i] is accessed through the pointer a_p
862   and index i, it's translated into the following sequence
863   in gimple:
864
865  i.1_5 = (unsigned int) i_1;
866  D.1605_6 = i.1_5 * 16;
867  D.1606_7 = (struct str_t *) D.1605_6;
868  a_p.2_8 = a_p;
869  D.1608_9 = D.1606_7 + a_p.2_8;
870
871  OP0 and OP1 are of the same pointer types and stand for
872  D.1606_7 and a_p.2_8 or vice versa.
873
874  This function checks that:
875
876  1. one of OP0 and OP1 (D.1606_7) has passed only one cast from a
877  non-pointer type (D.1606_7 = (struct str_t *) D.1605_6;).
878
879  2. the one of OP0 and OP1 that has passed the cast from a
880  non-pointer type (D.1606_7) is actually generated by multiplying
881  the index by the size of the type to which both OP0 and OP1 point
882  (in this case D.1605_6 = i.1_5 * 16; ).
883
884  3. the address of the def of the var that was cast (D.1605_6)
885  was not taken.  (How could that happen?)
886
887  The following items are checked implicitly by the end of algorithm:
888
889  4. one of OP0 and OP1 (a_p.2_8) has never been cast
890  (because if it had been cast to a pointer type, its type, which is
891  also the type of OP0 and OP1, would have been marked as escaped
892  during the analysis of the casting stmt, when check_cast() is called
893  from scan_for_refs for that stmt).
894
895  5. the defs of OP0 and OP1 are not passed into an externally visible
896  function (because if they were, their type, which is also the type
897  of OP0 and OP1, would be marked as escaped by check_call, called
898  from scan_for_refs on the call stmt).
899
900  Together, 1-5 guarantee that this is an access to an array element through a pointer and an index.
901
902*/
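
/* The gimple sequence above typically corresponds to source of roughly
   this shape (illustrative only), on a target where
   sizeof (struct str_t) is 16:

       struct str_t { int a; int b; int c; int d; };

       int
       get (struct str_t *a_p, int i)
       {
         return a_p[i].a;
       }

   so the index is scaled by 16 before being added to the pointer.  */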
903
904bool
905is_array_access_through_pointer_and_index (enum tree_code code, tree op0,
906					   tree op1, tree *base, tree *offset,
907					   gimple *offset_cast_stmt)
908{
909  tree before_cast;
910  gimple before_cast_def_stmt;
911  cast_t op0_cast, op1_cast;
912
913  *base = NULL;
914  *offset = NULL;
915  *offset_cast_stmt = NULL;
916
917  /* Check 1.  */
918  if (code == POINTER_PLUS_EXPR)
919    {
920      tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
921      tree op1type = TYPE_MAIN_VARIANT (TREE_TYPE (op1));
922
923      /* One of op0 and op1 is of pointer type and the other is numerical.  */
924      if (POINTER_TYPE_P (op0type) && NUMERICAL_TYPE_CHECK (op1type))
925	{
926	  *base = op0;
927	  *offset = op1;
928	}
929      else if (POINTER_TYPE_P (op1type) && NUMERICAL_TYPE_CHECK (op0type))
930	{
931	  *base = op1;
932	  *offset = op0;
933	}
934      else
935	return false;
936    }
937  else
938    {
939      /* Init data for walk_use_def_chains function.  */
940      op0_cast.type = op1_cast.type = 0;
941      op0_cast.stmt = op1_cast.stmt = NULL;
942
943      visited_stmts = pointer_set_create ();
944      walk_use_def_chains (op0, is_cast_from_non_pointer,(void *)(&op0_cast),
945			   false);
946      pointer_set_destroy (visited_stmts);
947
948      visited_stmts = pointer_set_create ();
949      walk_use_def_chains (op1, is_cast_from_non_pointer,(void *)(&op1_cast),
950			   false);
951      pointer_set_destroy (visited_stmts);
952
953      if (op0_cast.type == 1 && op1_cast.type == 0)
954	{
955	  *base = op1;
956	  *offset = op0;
957	  *offset_cast_stmt = op0_cast.stmt;
958	}
959      else if (op0_cast.type == 0 && op1_cast.type == 1)
960	{
961	  *base = op0;
962	  *offset = op1;
963	  *offset_cast_stmt = op1_cast.stmt;
964	}
965      else
966	return false;
967    }
968
969  /* Check 2.
970     offset_cast_stmt is of the form:
971     D.1606_7 = (struct str_t *) D.1605_6;  */
972
973  if (*offset_cast_stmt)
974    {
975      before_cast = SINGLE_SSA_TREE_OPERAND (*offset_cast_stmt, SSA_OP_USE);
976      if (!before_cast)
977	return false;
978
979      if (SSA_NAME_IS_DEFAULT_DEF (before_cast))
980	return false;
981
982      before_cast_def_stmt = SSA_NAME_DEF_STMT (before_cast);
983      if (!before_cast_def_stmt)
984	return false;
985    }
986  else
987    before_cast_def_stmt = SSA_NAME_DEF_STMT (*offset);
988
989  /* before_cast_def_stmt should be of the form:
990     D.1605_6 = i.1_5 * 16; */
991
992  if (is_gimple_assign (before_cast_def_stmt))
993    {
994      /* We expect temporary here.  */
995      if (!is_gimple_reg (gimple_assign_lhs (before_cast_def_stmt)))
996	return false;
997
998      if (gimple_assign_rhs_code (before_cast_def_stmt) == MULT_EXPR)
999	{
1000	  tree arg0 = gimple_assign_rhs1 (before_cast_def_stmt);
1001	  tree arg1 = gimple_assign_rhs2 (before_cast_def_stmt);
1002	  tree unit_size =
1003	    TYPE_SIZE_UNIT (TREE_TYPE (TYPE_MAIN_VARIANT (TREE_TYPE (op0))));
1004
1005	  if (!(CONSTANT_CLASS_P (arg0)
1006	      && simple_cst_equal (arg0, unit_size))
1007	      && !(CONSTANT_CLASS_P (arg1)
1008	      && simple_cst_equal (arg1, unit_size)))
1009	    return false;
1010	}
1011      else
1012	return false;
1013    }
1014  else
1015    return false;
1016
1017  /* Check 3.
1018     Check that the address of D.1605_6 was not taken.
1019     FIXME: if D.1605_6 is a gimple reg then it cannot be addressable.  */
1020
1021  return true;
1022}
1023
1024/* Register the parameter and return types of function FN.  The type
1025   ESCAPES if the function is visible outside of the compilation
1026   unit.  */
1027static void
1028check_function_parameter_and_return_types (tree fn, bool escapes)
1029{
1030  tree arg;
1031
1032  if (TYPE_ARG_TYPES (TREE_TYPE (fn)))
1033    {
1034      for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1035	   arg && TREE_VALUE (arg) != void_type_node;
1036	   arg = TREE_CHAIN (arg))
1037	{
1038	  tree type = get_canon_type (TREE_VALUE (arg), false, false);
1039	  if (escapes)
1040	    mark_interesting_type (type, EXPOSED_PARAMETER);
1041	}
1042    }
1043  else
1044    {
1045      /* FIXME - According to Geoff Keating, we should never have to
1046	 do this; the front ends should always process the arg list
1047	 from the TYPE_ARG_LIST. However, Geoff is wrong, this code
1048	 does seem to be live.  */
1049
1050      for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg))
1051	{
1052	  tree type = get_canon_type (TREE_TYPE (arg), false, false);
1053	  if (escapes)
1054	    mark_interesting_type (type, EXPOSED_PARAMETER);
1055	}
1056    }
1057  if (escapes)
1058    {
1059      tree type = get_canon_type (TREE_TYPE (TREE_TYPE (fn)), false, false);
1060      mark_interesting_type (type, EXPOSED_PARAMETER);
1061    }
1062}
1063
1064/* Check whether the variable T is the right kind of static variable for
1065   compilation unit scope escape analysis, and mark its type if needed.  */
1066
1067static inline void
1068has_proper_scope_for_analysis (tree t)
1069{
1070  /* If the variable has the "used" attribute, treat it as if it had
1071     been touched by the devil.  */
1072  tree type = get_canon_type (TREE_TYPE (t), false, false);
1073  if (!type) return;
1074
1075  if (DECL_PRESERVE_P (t))
1076    {
1077      mark_interesting_type (type, FULL_ESCAPE);
1078      return;
1079    }
1080
1081  /* Do not want to do anything with volatile except mark any
1082     function that uses one to be not const or pure.  */
1083  if (TREE_THIS_VOLATILE (t))
1084    return;
1085
1086  /* Do not care about a local automatic that is not static.  */
1087  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
1088    return;
1089
1090  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
1091    {
1092      /* If the front end set the variable to be READONLY and
1093	 constant, we can allow this variable in pure or const
1094	 functions but the scope is too large for our analysis to set
1095	 these bits ourselves.  */
1096
1097      if (TREE_READONLY (t)
1098	  && DECL_INITIAL (t)
1099	  && is_gimple_min_invariant (DECL_INITIAL (t)))
1100	; /* Read of a constant, do not change the function state.  */
1101      else
1102	{
1103	  /* The type escapes for all public and externs. */
1104	  mark_interesting_type (type, FULL_ESCAPE);
1105	}
1106    }
1107}
1108
1109/* If T is a FUNCTION_DECL, register its parameter and return types as
1110   escaping; if T is a VAR_DECL, check whether its type escapes.  */
1111
1112static void
1113check_operand (tree t)
1114{
1115  if (!t) return;
1116
1117  /* This is an assignment from a function, register the types as
1118     escaping.  */
1119  if (TREE_CODE (t) == FUNCTION_DECL)
1120    check_function_parameter_and_return_types (t, true);
1121
1122  else if (TREE_CODE (t) == VAR_DECL)
1123    has_proper_scope_for_analysis (t);
1124}
1125
1126/* Examine tree T for references.   */
1127
1128static void
1129check_tree (tree t)
1130{
1131  /* We also want to catch REALPART_EXPR and IMAGPART_EXPR here, but
1132     they are already included in handled_component_p.  */
1133  while (handled_component_p (t))
1134    {
1135      if (TREE_CODE (t) == ARRAY_REF)
1136	check_operand (TREE_OPERAND (t, 1));
1137      t = TREE_OPERAND (t, 0);
1138    }
1139
1140  if (INDIRECT_REF_P (t))
1141/*  || TREE_CODE (t) == MEM_REF) */
1142    check_tree (TREE_OPERAND (t, 0));
1143
1144  if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
1145    {
1146      check_operand (t);
1147      if (DECL_P (t) && DECL_INITIAL (t))
1148	check_tree (DECL_INITIAL (t));
1149    }
1150}
1151
1152/* Create an address_of edge FROM_TYPE.TO_TYPE.  */
1153static void
1154mark_interesting_addressof (tree to_type, tree from_type)
1155{
1156  int from_uid;
1157  int to_uid;
1158  bitmap type_map;
1159  splay_tree_node result;
1160
1161  from_type = get_canon_type (from_type, false, false);
1162  to_type = get_canon_type (to_type, false, false);
1163
1164  if (!from_type || !to_type)
1165    return;
1166
1167  from_uid = TYPE_UID (from_type);
1168  to_uid = TYPE_UID (to_type);
1169
1170  gcc_assert (ipa_type_escape_star_count_of_interesting_type (from_type) == 0);
1171
1172  /* Process the Y into X map pointer.  */
1173  result = splay_tree_lookup (uid_to_addressof_down_map,
1174			      (splay_tree_key) from_uid);
1175
1176  if (result)
1177    type_map = (bitmap) result->value;
1178  else
1179    {
1180      type_map = BITMAP_ALLOC (&ipa_obstack);
1181      splay_tree_insert (uid_to_addressof_down_map,
1182			 from_uid,
1183			 (splay_tree_value)type_map);
1184    }
1185  bitmap_set_bit (type_map, TYPE_UID (to_type));
1186
1187  /* Process the X into Y reverse map pointer.  */
1188  result =
1189    splay_tree_lookup (uid_to_addressof_up_map, (splay_tree_key) to_uid);
1190
1191  if (result)
1192    type_map = (bitmap) result->value;
1193  else
1194    {
1195      type_map = BITMAP_ALLOC (&ipa_obstack);
1196      splay_tree_insert (uid_to_addressof_up_map,
1197			 to_uid,
1198			 (splay_tree_value)type_map);
1199    }
1200  bitmap_set_bit (type_map, TYPE_UID (from_type));
1201}
1202
1203/* Scan tree T to see if there are any addresses taken within T.  */
1204
1205static void
1206look_for_address_of (tree t)
1207{
1208  if (TREE_CODE (t) == ADDR_EXPR)
1209    {
1210      tree x = get_base_var (t);
1211      tree cref = TREE_OPERAND (t, 0);
1212
1213      /* If we have an expression of the form "&a.b.c.d", mark a.b,
1214	 b.c and c.d as having their addresses taken.  */
1215      tree fielddecl = NULL_TREE;
1216      while (cref!= x)
1217	{
1218	  if (TREE_CODE (cref) == COMPONENT_REF)
1219	    {
1220	      fielddecl =  TREE_OPERAND (cref, 1);
1221	      mark_interesting_addressof (TREE_TYPE (fielddecl),
1222					  DECL_FIELD_CONTEXT (fielddecl));
1223	    }
1224	  else if (TREE_CODE (cref) == ARRAY_REF)
1225	    get_canon_type (TREE_TYPE (cref), false, false);
1226
1227	  cref = TREE_OPERAND (cref, 0);
1228	}
1229
1230      if (TREE_CODE (x) == VAR_DECL)
1231	has_proper_scope_for_analysis (x);
1232    }
1233}
1234
1235
1236/* Scan tree T to see if there are any casts within it.  */
1237
1238static unsigned int
1239look_for_casts (tree t)
1240{
1241  unsigned int cast = 0;
1242
1243  if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
1244    {
1245      tree castfromvar = TREE_OPERAND (t, 0);
1246      cast = cast | check_cast (TREE_TYPE (t), castfromvar);
1247    }
1248  else
1249    while (handled_component_p (t))
1250      {
1251	t = TREE_OPERAND (t, 0);
1252	if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1253	  {
1254	    /* This may be some part of a component ref.
1255	       IE it may be a.b.VIEW_CONVERT_EXPR<weird_type>(c).d, AFAIK.
1256	       castfromref will give you a.b.c, not a. */
1257	    tree castfromref = TREE_OPERAND (t, 0);
1258	    cast = cast | check_cast (TREE_TYPE (t), castfromref);
1259	  }
1260	else if (TREE_CODE (t) == COMPONENT_REF)
1261	  get_canon_type (TREE_TYPE (TREE_OPERAND (t, 1)), false, false);
1262      }
1263
1264  if (!cast)
1265    cast = CT_NO_CAST;
1266  return cast;
1267}
1268
1269/* Check to see if T is a read or address of operation on a static var
1270   we are interested in analyzing.  */
1271
1272static void
1273check_rhs_var (tree t)
1274{
1275  look_for_address_of (t);
1276  check_tree (t);
1277}
1278
1279/* Check to see if T is an assignment to a static var we are
1280   interested in analyzing.  */
1281
1282static void
1283check_lhs_var (tree t)
1284{
1285  check_tree (t);
1286}
1287
1288/* This is a scaled down version of get_asm_expr_operands from
1289   tree-ssa-operands.c.  The version there runs much later and assumes
1290   that aliasing information is already available.  Here we are just
1291   trying to find out whether the set of inputs and outputs contains
1292   references to, or addresses of, locals.  STMT is the asm statement
1293   being analyzed.  */
1294
1295static void
1296check_asm (gimple stmt)
1297{
1298  size_t i;
1299
1300  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1301    check_lhs_var (gimple_asm_output_op (stmt, i));
1302
1303  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1304    check_rhs_var (gimple_asm_input_op (stmt, i));
1305
1306  /* There is no code here to check for asm memory clobbers.  The
1307     casual maintainer might think that such code would be necessary,
1308     but that appears to be wrong.  In other parts of the compiler,
1309     the asm memory clobbers are assumed to only clobber variables
1310     that are addressable.  All types with addressable instances are
1311     assumed to already escape.  So, we are protected here.  */
1312}
1313
1314
1315/* Check the parameters of the function call CALL to mark the types
1316   that pass across the function boundary.  Also check to see if this
1317   is either an indirect call or a call outside the compilation
1318   unit.  */
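
/* A hypothetical example of what this catches:

       struct s { int i; };
       extern void ext (struct s v, struct s *p);
       void f (struct s x) { ext (x, &x); }

   The call to ext is external, so the type of the first argument is
   marked EXPOSED_PARAMETER, and the pointer type of the second argument
   causes "struct s" to be marked FULL_ESCAPE; any implicit cast between
   an argument and the declared parameter type is also checked.  */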
1319
1320static void
1321check_call (gimple call)
1322{
1323  tree callee_t = gimple_call_fndecl (call);
1324  struct cgraph_node* callee;
1325  enum availability avail = AVAIL_NOT_AVAILABLE;
1326  size_t i;
1327
1328  for (i = 0; i < gimple_call_num_args (call); i++)
1329    check_rhs_var (gimple_call_arg (call, i));
1330
1331  if (callee_t)
1332    {
1333      tree arg_type;
1334      tree last_arg_type = NULL;
1335      callee = cgraph_node(callee_t);
1336      avail = cgraph_function_body_availability (callee);
1337
1338      /* Check that there are no implicit casts in the passing of
1339	 parameters.  */
1340      if (TYPE_ARG_TYPES (TREE_TYPE (callee_t)))
1341	{
1342	  for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t)), i = 0;
1343	       arg_type && TREE_VALUE (arg_type) != void_type_node
1344	       && i < gimple_call_num_args (call);
1345	       arg_type = TREE_CHAIN (arg_type), i++)
1346	    {
1347	      tree operand = gimple_call_arg (call, i);
1348	      if (operand)
1349		{
1350		  last_arg_type = TREE_VALUE(arg_type);
1351		  check_cast (last_arg_type, operand);
1352		}
1353	      else
1354		/* The code reaches here for some unfortunate
1355		   builtin functions that do not have a list of
1356		   argument types.  */
1357		break;
1358	    }
1359	}
1360      else
1361	{
1362	  /* FIXME - According to Geoff Keating, we should never
1363	     have to do this; the front ends should always process
1364	     the arg list from the TYPE_ARG_LIST. */
1365	  for (arg_type = DECL_ARGUMENTS (callee_t), i = 0;
1366	       arg_type && i < gimple_call_num_args (call);
1367	       arg_type = TREE_CHAIN (arg_type), i++)
1368	    {
1369	      tree operand = gimple_call_arg (call, i);
1370	      if (operand)
1371		{
1372		  last_arg_type = TREE_TYPE (arg_type);
1373		  check_cast (last_arg_type, operand);
1374		}
1375	      else
1376		/* The code reaches here for some unfortunate
1377		   builtin functions that do not have a list of
1378		   argument types.  */
1379		break;
1380	    }
1381	}
1382
1383      /* In the case where we have a var_args function, we need to
1384	 check the remaining parameters against the last argument.  */
1385      arg_type = last_arg_type;
1386      for ( ; i < gimple_call_num_args (call); i++)
1387	{
1388	  tree operand = gimple_call_arg (call, i);
1389	  if (arg_type)
1390	    check_cast (arg_type, operand);
1391	  else
1392	    {
1393	      /* The code reaches here for some unfortunate
1394		 builtin functions that do not have a list of
1395		 argument types.  Most of these functions have
1396		 been marked as having their parameters not
1397		 escape, but for the rest, the type is doomed.  */
1398	      tree type = get_canon_type (TREE_TYPE (operand), false, false);
1399	      mark_interesting_type (type, FULL_ESCAPE);
1400	    }
1401	}
1402    }
1403
1404  /* The callee is either unknown (indirect call) or there is just no
1405     scannable code for it (external call).  We look to see if there
1406     are any bits available for the callee (such as by declaration or
1407     because it is a builtin) and process solely on the basis of those
1408     bits.  */
1409  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
1410    {
1411      /* If this is a direct call to an external function, mark all of
1412	 the parameter and return types.  */
1413      for (i = 0; i < gimple_call_num_args (call); i++)
1414	{
1415	  tree operand = gimple_call_arg (call, i);
1416	  tree type = get_canon_type (TREE_TYPE (operand), false, false);
1417	  mark_interesting_type (type, EXPOSED_PARAMETER);
1418	}
1419
1420      if (callee_t)
1421	{
1422	  tree type =
1423	    get_canon_type (TREE_TYPE (TREE_TYPE (callee_t)), false, false);
1424	  mark_interesting_type (type, EXPOSED_PARAMETER);
1425	}
1426    }
1427}
1428
1429/* CODE is the operation on OP0 and OP1.  OP0 is the operand that we
1430   *know* is a pointer type.  OP1 may be a pointer type.  */
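
/* For instance (a sketch, not taken from the sources): if OP0 has type
   "struct s *" and sizeof (struct s) is 16, a POINTER_PLUS_EXPR whose
   constant offset is 32 (a multiple of the element size) is acceptable,
   as is the multiply-and-add pattern recognized by
   is_array_access_through_pointer_and_index.  A constant offset of 3,
   which is not a multiple of 16, is not acceptable, and the caller then
   marks the pointed-to type as fully escaping.  */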
1431static bool
1432okay_pointer_operation (enum tree_code code, tree op0, tree op1)
1433{
1434  tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
1435
1436  switch (code)
1437    {
1438    case MULT_EXPR:
1439      /* Multiplication does not change alignment.  */
1440      return true;
1441      break;
1442    case MINUS_EXPR:
1443    case PLUS_EXPR:
1444    case POINTER_PLUS_EXPR:
1445      {
1446	tree base, offset;
1447	gimple offset_cast_stmt;
1448
1449	if (POINTER_TYPE_P (op0type)
1450	    && TREE_CODE (op0) == SSA_NAME
1451	    && TREE_CODE (op1) == SSA_NAME
1452	    && is_array_access_through_pointer_and_index (code, op0, op1,
1453							  &base,
1454							  &offset,
1455							  &offset_cast_stmt))
1456	  return true;
1457	else
1458	  {
1459	    tree size_of_op0_points_to = TYPE_SIZE_UNIT (TREE_TYPE (op0type));
1460
1461	    if (CONSTANT_CLASS_P (op1)
1462		&& size_of_op0_points_to
1463		&& multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1464				  op1, size_of_op0_points_to))
1465	      return true;
1466
1467	    if (CONSTANT_CLASS_P (op0)
1468		&& size_of_op0_points_to
1469		&& multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1470				  op0, size_of_op0_points_to))
1471	      return true;
1472	  }
1473      }
1474      break;
1475    default:
1476      return false;
1477    }
1478  return false;
1479}
1480
1481
1482
1483/* Helper for scan_for_refs.  Check the operands of an assignment to
1484   mark types that may escape.  */
1485
1486static void
1487check_assign (gimple t)
1488{
1489  /* First look at the lhs and see what variable is stored to.  */
1490  check_lhs_var (gimple_assign_lhs (t));
1491
1492  /* For the purposes of figuring out what the cast affects */
1493
1494  /* Next check the operands on the rhs to see if they are ok. */
1495  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (t)))
1496    {
1497    case tcc_binary:
1498      {
1499	tree op0 = gimple_assign_rhs1 (t);
1500	tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1501	tree op1 = gimple_assign_rhs2 (t);
1502	tree type1 = get_canon_type (TREE_TYPE (op1), false, false);
1503
1504	/* If this is pointer arithmetic of any bad sort, then
1505	    we need to mark the types as bad.  For binary
1506	    operations, no binary operator we currently support
1507	    is always "safe" in regard to what it would do to
1508	    pointers for purposes of determining which types
1509	    escape, except operations of the size of the type.
1510	    It is possible that min and max under the right set
1511	    of circumstances and if the moon is in the correct
1512	    place could be safe, but it is hard to see how this
1513	    is worth the effort.  */
1514	if (type0 && POINTER_TYPE_P (type0)
1515	    && !okay_pointer_operation (gimple_assign_rhs_code (t), op0, op1))
1516	  mark_interesting_type (type0, FULL_ESCAPE);
1517
1518	if (type1 && POINTER_TYPE_P (type1)
1519	    && !okay_pointer_operation (gimple_assign_rhs_code (t), op1, op0))
1520	  mark_interesting_type (type1, FULL_ESCAPE);
1521
1522	look_for_casts (op0);
1523	look_for_casts (op1);
1524	check_rhs_var (op0);
1525	check_rhs_var (op1);
1526      }
1527      break;
1528
1529    case tcc_unary:
1530      {
1531	tree op0 = gimple_assign_rhs1 (t);
1532	tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1533
1534	/* For unary operations, if the operation is NEGATE or ABS on
1535	   a pointer, this is also considered pointer arithmetic and
1536	   thus, bad for business.  */
1537	if (type0
1538	    && POINTER_TYPE_P (type0)
1539	    && (TREE_CODE (op0) == NEGATE_EXPR
1540	      || TREE_CODE (op0) == ABS_EXPR))
1541	  mark_interesting_type (type0, FULL_ESCAPE);
1542
1543	check_rhs_var (op0);
1544	look_for_casts (op0);
1545      }
1546      break;
1547
1548    case tcc_reference:
1549      look_for_casts (gimple_assign_rhs1 (t));
1550      check_rhs_var (gimple_assign_rhs1 (t));
1551      break;
1552
1553    case tcc_declaration:
1554      check_rhs_var (gimple_assign_rhs1 (t));
1555      break;
1556
1557    case tcc_expression:
1558      if (gimple_assign_rhs_code (t) == ADDR_EXPR)
1559	{
1560	  tree rhs = gimple_assign_rhs1 (t);
1561	  look_for_casts (TREE_OPERAND (rhs, 0));
1562	  check_rhs_var (rhs);
1563	}
1564      break;
1565
1566    default:
1567      break;
1568    }
1569}
1570
1571
1572/* Scan statement T for references to types and mark anything
1573   interesting.  */
1574
1575static void
1576scan_for_refs (gimple t)
1577{
1578  switch (gimple_code (t))
1579    {
1580    case GIMPLE_ASSIGN:
1581      check_assign (t);
1582      break;
1583
1584    case GIMPLE_CALL:
1585      /* If this is a call to malloc, squirrel away the result so we
1586	 do not mark the resulting cast as being bad.  */
1587      check_call (t);
1588      break;
1589
1590    case GIMPLE_ASM:
1591      check_asm (t);
1592      break;
1593
1594    default:
1595      break;
1596    }
1597
1598  return;
1599}
1600
1601
1602/* The init routine for this type escape analysis pass.  See the
1603   comments at the top of the file for a description.  */
1604static void
1605ipa_init (void)
1606{
1607  bitmap_obstack_initialize (&ipa_obstack);
1608  global_types_exposed_parameter = BITMAP_ALLOC (&ipa_obstack);
1609  global_types_full_escape = BITMAP_ALLOC (&ipa_obstack);
1610  global_types_seen = BITMAP_ALLOC (&ipa_obstack);
1611
1612  uid_to_canon_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
1613  all_canon_types = splay_tree_new (compare_type_brand, 0, 0);
1614  type_to_canon_type = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1615  uid_to_subtype_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1616  uid_to_addressof_down_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1617  uid_to_addressof_up_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1618
1619  /* There are some shared nodes, in particular the initializers on
1620     static declarations.  We do not need to scan them more than once
1621     since all we would be interested in are the addressof
1622     operations.  */
1623  visited_nodes = pointer_set_create ();
1624  initialized = true;
1625}
1626
1627/* Check out the rhs of a static or global initialization VNODE to see
1628   if it contains any addressof operations.  Note that some of
1629   these variables may not even be referenced in the code in this
1630   compilation unit but their right hand sides may contain references
1631   to variables defined within this unit.  */
1632
1633static void
1634analyze_variable (struct varpool_node *vnode)
1635{
1636  tree global = vnode->decl;
1637  tree type = get_canon_type (TREE_TYPE (global), false, false);
1638
1639  /* If this variable has exposure beyond the compilation unit, add
1640     its type to the global types.  */
1641
1642  if (vnode->externally_visible)
1643    mark_interesting_type (type, FULL_ESCAPE);
1644
1645  gcc_assert (TREE_CODE (global) == VAR_DECL);
1646
1647  if (DECL_INITIAL (global))
1648    check_tree (DECL_INITIAL (global));
1649}
1650
1651/* This is the main routine for finding the type references and casts
1652   within a function FN.  */
1653
1654static void
1655analyze_function (struct cgraph_node *fn)
1656{
1657  tree decl = fn->decl;
1658  check_function_parameter_and_return_types (decl,
1659					     fn->local.externally_visible);
1660  if (dump_file)
1661    fprintf (dump_file, "\n local analysis of %s", cgraph_node_name (fn));
1662
1663  {
1664    struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
1665    basic_block this_block;
1666
1667    FOR_EACH_BB_FN (this_block, this_cfun)
1668      {
1669	gimple_stmt_iterator gsi;
1670	for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
1671	  scan_for_refs (gsi_stmt (gsi));
1672      }
1673  }
1674
1675  /* There may be const decls with interesting right hand sides.  */
1676  if (DECL_STRUCT_FUNCTION (decl))
1677    {
1678      tree step;
1679      for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
1680	   step;
1681	   step = TREE_CHAIN (step))
1682	{
1683	  tree var = TREE_VALUE (step);
1684	  if (TREE_CODE (var) == VAR_DECL
1685	      && DECL_INITIAL (var)
1686	      && !TREE_STATIC (var))
1687	    check_tree (DECL_INITIAL (var));
1688	  get_canon_type (TREE_TYPE (var), false, false);
1689	}
1690    }
1691}
1692
1693
1694
1695/* Convert a type UID into a type.  */
1696static tree
1697type_for_uid (int uid)
1698{
1699  splay_tree_node result =
1700    splay_tree_lookup (uid_to_canon_type, (splay_tree_key) uid);
1701
1702  if (result)
1703    return (tree) result->value;
1704  else return NULL;
1705}
1706
1707/* Return a bitmap with the subtypes of the type for UID.  If it
1708   does not exist, return either NULL or a new bitmap depending on the
1709   value of CREATE.  */
1710
1711static bitmap
1712subtype_map_for_uid (int uid, bool create)
1713{
1714  splay_tree_node result = splay_tree_lookup (uid_to_subtype_map,
1715			      (splay_tree_key) uid);
1716
1717  if (result)
1718    return (bitmap) result->value;
1719  else if (create)
1720    {
1721      bitmap subtype_map = BITMAP_ALLOC (&ipa_obstack);
1722      splay_tree_insert (uid_to_subtype_map,
1723			 uid,
1724			 (splay_tree_value)subtype_map);
1725      return subtype_map;
1726    }
1727  else return NULL;
1728}
1729
1730/* Mark all of the supertypes and field types of TYPE as being seen.
1731   Also accumulate the subtypes for each type so that
1732   close_type_full_escape can mark a subtype as escaping if the
1733   supertype escapes.  */
1734
1735static void
1736close_type_seen (tree type)
1737{
1738  tree field;
1739  int i, uid;
1740  tree binfo, base_binfo;
1741
1742  /* See thru all pointer tos and array ofs. */
1743  type = get_canon_type (type, true, true);
1744  if (!type)
1745    return;
1746
1747  uid = TYPE_UID (type);
1748
1749  if (bitmap_bit_p (been_there_done_that, uid))
1750    return;
1751  bitmap_set_bit (been_there_done_that, uid);
1752
1753  /* If we are doing a language with a type hierarchy, mark all of
1754     the superclasses.  */
1755  if (TYPE_BINFO (type))
1756    for (binfo = TYPE_BINFO (type), i = 0;
1757	 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1758      {
1759	tree binfo_type = BINFO_TYPE (base_binfo);
1760	bitmap subtype_map = subtype_map_for_uid
1761	  (TYPE_UID (TYPE_MAIN_VARIANT (binfo_type)), true);
1762	bitmap_set_bit (subtype_map, uid);
1763	close_type_seen (get_canon_type (binfo_type, true, true));
1764      }
1765
1766  /* If the field is a struct or union type, mark all of the
1767     subfields.  */
1768  for (field = TYPE_FIELDS (type);
1769       field;
1770       field = TREE_CHAIN (field))
1771    {
1772      tree field_type;
1773      if (TREE_CODE (field) != FIELD_DECL)
1774	continue;
1775
1776      field_type = TREE_TYPE (field);
1777      if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1778	close_type_seen (get_canon_type (field_type, true, true));
1779    }
1780}
1781
1782/* Take a TYPE that has been passed by value to an external function
1783   and mark all of the fields that have pointer types as escaping. For
1784   any of the non pointer types that are structures or unions,
1785   recurse.  TYPE is never a pointer type.  */
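
/* A hypothetical example: if

       struct pointee { int x; };
       struct q { struct pointee *pp; struct r { int y; } nested; };

   is passed by value to an external function, the field PP has pointer
   type, so "struct pointee" is marked as fully escaping, while the
   non-pointer aggregate field NESTED is marked EXPOSED_PARAMETER and
   recursed into.  */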
1786
1787static void
1788close_type_exposed_parameter (tree type)
1789{
1790  tree field;
1791  int uid;
1792
1793  type = get_canon_type (type, false, false);
1794  if (!type)
1795    return;
1796  uid = TYPE_UID (type);
1797  gcc_assert (!POINTER_TYPE_P (type));
1798
1799  if (bitmap_bit_p (been_there_done_that, uid))
1800    return;
1801  bitmap_set_bit (been_there_done_that, uid);
1802
1803  /* If the field is a struct or union type, mark all of the
1804     subfields.  */
1805  for (field = TYPE_FIELDS (type);
1806       field;
1807       field = TREE_CHAIN (field))
1808    {
1809      tree field_type;
1810
1811      if (TREE_CODE (field) != FIELD_DECL)
1812	continue;
1813
1814      field_type = get_canon_type (TREE_TYPE (field), false, false);
1815      mark_interesting_type (field_type, EXPOSED_PARAMETER);
1816
1817      /* Only recurse for non pointer types of structures and unions.  */
1818      if (ipa_type_escape_star_count_of_interesting_type (field_type) == 0)
1819	close_type_exposed_parameter (field_type);
1820    }
1821}
1822
/* The next function handles the case where a type fully escapes.
   This means that not only does the type itself escape, but also:

   a) the type of every field escapes recursively, and
   b) every subtype escapes, as do the supertypes and the pointed-to
      types of each field.

   Note that pointer types themselves are never marked as escaping;
   a pointer type escapes exactly when its pointed-to type escapes.

   Take a TYPE that has had the address taken for an instance of it
   and mark all of the types for its fields as having their addresses
   taken.  */
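
/* A concrete sketch (hypothetical input): if the address of an
   instance of

     struct S { struct Q *q; struct T t; };

   is passed to an externally visible function, S fully escapes; the
   closure below then also marks Q and T (and, recursively, their
   field types) as fully escaping, along with any supertypes and
   recorded subtypes of S and any type A for which an expression of
   the form "&a.s" was seen.  */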

static void
close_type_full_escape (tree type)
{
  tree field;
  unsigned int i;
  int uid;
  tree binfo, base_binfo;
  bitmap_iterator bi;
  bitmap subtype_map;
  splay_tree_node address_result;

  /* Strip off any pointer or array types.  */
  type = get_canon_type (type, true, true);
  if (!type)
    return;
  uid = TYPE_UID (type);

  if (bitmap_bit_p (been_there_done_that, uid))
    return;
  bitmap_set_bit (been_there_done_that, uid);

  subtype_map = subtype_map_for_uid (uid, false);

  /* If we are doing a language with a type hierarchy, mark all of
     the superclasses.  */
  if (TYPE_BINFO (type))
    for (binfo = TYPE_BINFO (type), i = 0;
	 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
      {
	tree binfotype = BINFO_TYPE (base_binfo);
	binfotype = mark_type (binfotype, FULL_ESCAPE);
	close_type_full_escape (binfotype);
      }

  /* Mark as escaping every recorded subtype of this type, since an
     escaping supertype may be downcast to any of them.  */
  if (subtype_map)
    EXECUTE_IF_SET_IN_BITMAP (subtype_map, 0, i, bi)
      {
	tree subtype = type_for_uid (i);
	subtype = mark_type (subtype, FULL_ESCAPE);
	close_type_full_escape (subtype);
      }

  /* Mark the type of every field as fully escaping and recurse into
     any field whose type is a struct or union, or a pointer or array
     of one.  */
  for (field = TYPE_FIELDS (type);
       field;
       field = TREE_CHAIN (field))
    {
      tree field_type;
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      field_type = TREE_TYPE (field);
      if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
	{
	  field_type = mark_type (field_type, FULL_ESCAPE);
	  close_type_full_escape (field_type);
	}
    }

  /* For all of the types A that contain this type B and were part of
     an expression like "&...A.B...", mark the A's as escaping.  */
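  /* E.g. (hypothetical input): if "&a.b" appeared somewhere in the
     unit, where the field B of A has this type, then A is marked as
     escaping too, because a pointer into the interior of an A
     instance may now be visible externally.  */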
  address_result = splay_tree_lookup (uid_to_addressof_up_map,
				      (splay_tree_key) uid);
  if (address_result)
    {
      bitmap containing_classes = (bitmap) address_result->value;
      EXECUTE_IF_SET_IN_BITMAP (containing_classes, 0, i, bi)
	{
	  close_type_full_escape (type_for_uid (i));
	}
    }
}

/* Transitively close the addressof bitmap for the type with unique
   id UID.  This means that if we had a.b and b.c, a would end up
   with both b and c in its map.  */
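
/* Concretely (hypothetical input): given

     struct C { int x; };
     struct B { struct C c; };
     struct A { struct B b; };

   and the expressions "&a.b" and "&b.c" somewhere in the unit, A's
   down map initially holds only B and B's map holds only C; after
   this closure A's map holds both B and C.  */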

static bitmap
close_addressof_down (int uid)
{
  bitmap_iterator bi;
  splay_tree_node result =
    splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
  bitmap map = NULL;
  bitmap new_map;
  unsigned int i;

  if (result)
    map = (bitmap) result->value;
  else
    return NULL;

  if (bitmap_bit_p (been_there_done_that, uid))
    return map;
  bitmap_set_bit (been_there_done_that, uid);

  /* If the type escapes, get rid of the addressof map; it will not
     be needed.  */
  if (bitmap_bit_p (global_types_full_escape, uid))
    {
      BITMAP_FREE (map);
      splay_tree_remove (uid_to_addressof_down_map, (splay_tree_key) uid);
      return NULL;
    }

  /* The new map contains every bit of the old map plus, transitively,
     a bit for each type enclosed by those types.  */
  new_map = BITMAP_ALLOC (&ipa_obstack);

  EXECUTE_IF_SET_IN_BITMAP (map, 0, i, bi)
    {
      bitmap submap = close_addressof_down (i);
      bitmap_set_bit (new_map, i);
      if (submap)
	bitmap_ior_into (new_map, submap);
    }
  result->value = (splay_tree_value) new_map;

  BITMAP_FREE (map);
  return new_map;
}


/* The main entry point for type escape analysis.  */
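
/* After this pass runs, the rest of the compiler queries its results
   through the interface in ipa-type-escape.h; for instance (a sketch,
   assuming the ipa_type_escape_type_contained_p predicate exported
   there):

     if (ipa_type_escape_type_contained_p (TREE_TYPE (decl)))
       ... no instance of the type is visible outside this
	   compilation unit, so stronger aliasing assumptions
	   are safe ...  */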

static unsigned int
type_escape_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  unsigned int i;
  bitmap_iterator bi;
  splay_tree_node result;

  ipa_init ();

  /* Process all of the variables first.  */
  FOR_EACH_STATIC_VARIABLE (vnode)
    analyze_variable (vnode);

  /* Process all of the functions next.

     We do not want to process any of the clones, so only nodes that
     are not a clone of another node are analyzed.  However, we do
     need to process any AVAIL_OVERWRITABLE functions (these are
     never clones) because they may cause a type to escape.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && !node->clone_of)
      analyze_function (node);

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;

  /* Do all of the closures to discover which types escape the
     compilation unit.  */

  been_there_done_that = BITMAP_ALLOC (&ipa_obstack);
  bitmap_tmp = BITMAP_ALLOC (&ipa_obstack);

  /* Examine the types that we have directly seen in scanning the code
     and add to that any contained types or superclasses.  */

  bitmap_copy (bitmap_tmp, global_types_seen);
  EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
    {
      tree type = type_for_uid (i);
      /* Only look at records and unions, and pointers to or arrays
	 of them.  */
      if (ipa_type_escape_star_count_of_interesting_or_array_type (type) >= 0)
	close_type_seen (type);
    }
  bitmap_clear (been_there_done_that);

  /* Examine all of the types passed by value and mark any enclosed
     pointer types as escaping.  */
  bitmap_copy (bitmap_tmp, global_types_exposed_parameter);
  EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
    {
      close_type_exposed_parameter (type_for_uid (i));
    }
  bitmap_clear (been_there_done_that);

  /* Close the types for escape.  If something escapes, then any
     enclosed types escape as well as any subtypes.  */
  bitmap_copy (bitmap_tmp, global_types_full_escape);
  EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
    {
      close_type_full_escape (type_for_uid (i));
    }
  bitmap_clear (been_there_done_that);

  /* Before this step, the uid_to_addressof_down_map for type X
     contained an entry for Y if there had been an operation of the
     form &X.Y.  This step adds all of the fields contained within Y
     (recursively) to X's map.  */

  result = splay_tree_min (uid_to_addressof_down_map);
  while (result)
    {
      int uid = result->key;
      /* Close the addressof map, i.e. copy all of the transitive
	 substructures up to this level.  */
      close_addressof_down (uid);
      result = splay_tree_successor (uid_to_addressof_down_map, uid);
    }

  /* The array types and pointer types are not needed in the
     persistent data structures.  */
  result = splay_tree_min (all_canon_types);
  while (result)
    {
      tree type = (tree) result->value;
      tree key = (tree) result->key;
      if (POINTER_TYPE_P (type)
	  || TREE_CODE (type) == ARRAY_TYPE)
	{
	  splay_tree_remove (all_canon_types, (splay_tree_key) result->key);
	  splay_tree_remove (type_to_canon_type, (splay_tree_key) type);
	  splay_tree_remove (uid_to_canon_type, (splay_tree_key) TYPE_UID (type));
	  bitmap_clear_bit (global_types_seen, TYPE_UID (type));
	}
      result = splay_tree_successor (all_canon_types, (splay_tree_key) key);
    }

  if (dump_file)
    {
      EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
	{
	  /* The pointer types are in the global_types_full_escape
	     bitmap but not in the backwards map.  They also contain
	     no useful information since they are not marked.  */
	  tree type = type_for_uid (i);
	  fprintf (dump_file, "type %d ", i);
	  print_generic_expr (dump_file, type, 0);
	  if (bitmap_bit_p (global_types_full_escape, i))
	    fprintf (dump_file, " escaped\n");
	  else
	    fprintf (dump_file, " contained\n");
	}
    }

  /* Get rid of uid_to_addressof_up_map and its bitmaps.  */
  result = splay_tree_min (uid_to_addressof_up_map);
  while (result)
    {
      int uid = (int) result->key;
      bitmap bm = (bitmap) result->value;

      BITMAP_FREE (bm);
      splay_tree_remove (uid_to_addressof_up_map, (splay_tree_key) uid);
      result = splay_tree_successor (uid_to_addressof_up_map, uid);
    }

  /* Get rid of the subtype map.  */
  result = splay_tree_min (uid_to_subtype_map);
  while (result)
    {
      bitmap b = (bitmap) result->value;
      BITMAP_FREE (b);
      splay_tree_remove (uid_to_subtype_map, result->key);
      result = splay_tree_min (uid_to_subtype_map);
    }
  splay_tree_delete (uid_to_subtype_map);
  uid_to_subtype_map = NULL;

  BITMAP_FREE (global_types_exposed_parameter);
  BITMAP_FREE (been_there_done_that);
  BITMAP_FREE (bitmap_tmp);
  return 0;
}
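
/* Gate: run the pass only when type escape analysis has been
   requested (flag_ipa_type_escape) and no errors have been seen.  */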
static bool
gate_type_escape_vars (void)
{
  return (flag_ipa_type_escape
	  /* Don't bother doing anything if the program has errors.  */
	  && !(errorcount || sorrycount));
}

struct simple_ipa_opt_pass pass_ipa_type_escape =
{
 {
  SIMPLE_IPA_PASS,
  "type-escape-var",			/* name */
  gate_type_escape_vars,		/* gate */
  type_escape_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_TYPE_ESCAPE,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};