1/* SSA operands management for trees.
2   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING.  If not, write to
18the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19Boston, MA 02110-1301, USA.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
27#include "function.h"
28#include "diagnostic.h"
29#include "tree-flow.h"
30#include "tree-inline.h"
31#include "tree-pass.h"
32#include "ggc.h"
33#include "timevar.h"
34#include "toplev.h"
35#include "langhooks.h"
36#include "ipa-reference.h"
37
38/* This file contains the code required to manage the operands cache of the
39   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
40   annotation.  This cache contains operands that will be of interest to
41   optimizers and other passes wishing to manipulate the IL.
42
   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.
59
60   The append_* routines check for duplication, and simply keep a list of
61   unique objects for each operand type in the build_* extendable vectors.
62
63   Once the stmt tree is completely parsed, the finalize_ssa_operands()
64   routine is called, which proceeds to perform the finalization routine
65   on each of the 5 operand vectors which have been built up.
66
67   If the stmt had a previous operand cache, the finalization routines
68   attempt to match up the new operands with the old ones.  If it's a perfect
69   match, the old vector is simply reused.  If it isn't a perfect match, then
70   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operand cache, then
   the new cache vector will also get the same SSA_NAME.

   For instance, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified such
   that it contains 'a_5' rather than 'a'.
78
79*/
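
/* As a rough usage sketch of how a pass is expected to consume this cache
   (the iterator macros live in tree-ssa-operands.h and tree-flow.h), a
   typical client rebuilds the cache after changing a statement and then
   walks the operands with the iterator macros:

	tree op;
	ssa_op_iter iter;

	... modify STMT in place ...
	update_stmt (stmt);

	FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_USES)
	  process_use (op);

   Here process_use () is only a placeholder for whatever the pass wants to
   do with each real and virtual use.  */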
80
81
82/* Flags to describe operand properties in helpers.  */
83
84/* By default, operands are loaded.  */
85#define opf_none	0
86
/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
89#define opf_is_def 	(1 << 0)
90
91/* Operand is the target of an assignment expression.  */
92#define opf_kill_def 	(1 << 1)
93
94/* No virtual operands should be created in the expression.  This is used
95   when traversing ADDR_EXPR nodes which have different semantics than
96   other expressions.  Inside an ADDR_EXPR node, the only operands that we
97   need to consider are indices into arrays.  For instance, &a.b[i] should
98   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99   VUSE for 'b'.  */
100#define opf_no_vops 	(1 << 2)
101
102/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
103   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
104#define opf_non_specific  (1 << 3)
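
/* For illustration, given these two GIMPLE statements:

	a = b;		<- LHS scanned with (opf_is_def | opf_kill_def)
	*p = b;		<- the kill flag is dropped in
			   get_indirect_ref_operands, so (given alias
			   information) the store becomes a V_MAY_DEF of
			   whatever 'p' points to

   (see parse_ssa_operands and get_indirect_ref_operands below).  */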
105
106
107/* Array for building all the def operands.  */
108static VEC(tree,heap) *build_defs;
109
110/* Array for building all the use operands.  */
111static VEC(tree,heap) *build_uses;
112
113/* Array for building all the v_may_def operands.  */
114static VEC(tree,heap) *build_v_may_defs;
115
116/* Array for building all the vuse operands.  */
117static VEC(tree,heap) *build_vuses;
118
119/* Array for building all the v_must_def operands.  */
120static VEC(tree,heap) *build_v_must_defs;
121
/* True if the operand caches for call-clobbered calls and read-only
   (pure/const) calls, respectively, are valid.  */
123bool ssa_call_clobbered_cache_valid;
124bool ssa_ro_call_cache_valid;
125
/* These arrays are the cached operand vectors for call-clobbered and
   read-only calls.  */
127static VEC(tree,heap) *clobbered_v_may_defs;
128static VEC(tree,heap) *clobbered_vuses;
129static VEC(tree,heap) *ro_call_vuses;
130static bool clobbered_aliased_loads;
131static bool clobbered_aliased_stores;
132static bool ro_call_aliased_loads;
133static bool ops_active = false;
134
135static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
136static unsigned operand_memory_index;
137
138static void get_expr_operands (tree, tree *, int);
139static void get_asm_expr_operands (tree);
140static void get_indirect_ref_operands (tree, tree, int);
141static void get_tmr_operands (tree, tree, int);
142static void get_call_expr_operands (tree, tree);
143static inline void append_def (tree *);
144static inline void append_use (tree *);
145static void append_v_may_def (tree);
146static void append_v_must_def (tree);
147static void add_call_clobber_ops (tree, tree);
148static void add_call_read_ops (tree);
149static void add_stmt_operand (tree *, stmt_ann_t, int);
150static void build_ssa_operands (tree stmt);
151
152static def_optype_p free_defs = NULL;
153static use_optype_p free_uses = NULL;
154static vuse_optype_p free_vuses = NULL;
155static maydef_optype_p free_maydefs = NULL;
156static mustdef_optype_p free_mustdefs = NULL;
157
158
159/* Return the DECL_UID of the base variable of T.  */
160
161static inline unsigned
162get_name_decl (tree t)
163{
164  if (TREE_CODE (t) != SSA_NAME)
165    return DECL_UID (t);
166  else
167    return DECL_UID (SSA_NAME_VAR (t));
168}
169
170/* Comparison function for qsort used in operand_build_sort_virtual.  */
171
172static int
173operand_build_cmp (const void *p, const void *q)
174{
175  tree e1 = *((const tree *)p);
176  tree e2 = *((const tree *)q);
177  unsigned int u1,u2;
178
179  u1 = get_name_decl (e1);
180  u2 = get_name_decl (e2);
181
182  /* We want to sort in ascending order.  They can never be equal.  */
183#ifdef ENABLE_CHECKING
184  gcc_assert (u1 != u2);
185#endif
186  return (u1 > u2 ? 1 : -1);
187}
188
189/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */
190
191static inline void
192operand_build_sort_virtual (VEC(tree,heap) *list)
193{
194  int num = VEC_length (tree, list);
195  if (num < 2)
196    return;
197  if (num == 2)
198    {
199      if (get_name_decl (VEC_index (tree, list, 0))
200	  > get_name_decl (VEC_index (tree, list, 1)))
201	{
202	  /* Swap elements if in the wrong order.  */
203	  tree tmp = VEC_index (tree, list, 0);
204	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
205	  VEC_replace (tree, list, 1, tmp);
206	}
207      return;
208    }
209  /* There are 3 or more elements, call qsort.  */
210  qsort (VEC_address (tree, list),
211	 VEC_length (tree, list),
212	 sizeof (tree),
213	 operand_build_cmp);
214}
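
/* For example (with made-up DECL_UIDs), if build_vuses contains the
   variables {b, a, c} whose base DECL_UIDs are {12, 7, 20}, the vector is
   reordered to {a, b, c}.  Keeping the virtual operand vectors sorted by
   DECL_UID lets the finalization code walk the old and new operand lists in
   lock-step when deciding whether the old vector can simply be reused.  */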
215
216
217
218/*  Return true if the ssa operands cache is active.  */
219
220bool
221ssa_operands_active (void)
222{
223  return ops_active;
224}
225
226
227/* Initialize the operand cache routines.  */
228
229void
230init_ssa_operands (void)
231{
232  build_defs = VEC_alloc (tree, heap, 5);
233  build_uses = VEC_alloc (tree, heap, 10);
234  build_vuses = VEC_alloc (tree, heap, 25);
235  build_v_may_defs = VEC_alloc (tree, heap, 25);
236  build_v_must_defs = VEC_alloc (tree, heap, 25);
237
238  gcc_assert (operand_memory == NULL);
239  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
240  ops_active = true;
241}
242
243
244/* Dispose of anything required by the operand routines.  */
245
246void
247fini_ssa_operands (void)
248{
249  struct ssa_operand_memory_d *ptr;
250  VEC_free (tree, heap, build_defs);
251  VEC_free (tree, heap, build_uses);
252  VEC_free (tree, heap, build_v_must_defs);
253  VEC_free (tree, heap, build_v_may_defs);
254  VEC_free (tree, heap, build_vuses);
255  free_defs = NULL;
256  free_uses = NULL;
257  free_vuses = NULL;
258  free_maydefs = NULL;
259  free_mustdefs = NULL;
260  while ((ptr = operand_memory) != NULL)
261    {
262      operand_memory = operand_memory->next;
263      ggc_free (ptr);
264    }
265
266  VEC_free (tree, heap, clobbered_v_may_defs);
267  VEC_free (tree, heap, clobbered_vuses);
268  VEC_free (tree, heap, ro_call_vuses);
269  ops_active = false;
270}
271
272
273/* Return memory for operands of SIZE chunks.  */
274
275static inline void *
276ssa_operand_alloc (unsigned size)
277{
278  char *ptr;
279  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
280    {
281      struct ssa_operand_memory_d *ptr;
282      ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
283      ptr->next = operand_memory;
284      operand_memory = ptr;
285      operand_memory_index = 0;
286    }
287  ptr = &(operand_memory->mem[operand_memory_index]);
288  operand_memory_index += size;
289  return ptr;
290}
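
/* For instance, a request for sizeof (struct def_optype_d) bytes simply
   bumps operand_memory_index within the current ssa_operand_memory_d block;
   a new block is chained onto operand_memory only when the current one
   cannot satisfy the request.  Nothing is ever returned to this allocator
   piecemeal (freed operand nodes go on the free_* lists instead); the whole
   chain is released at once in fini_ssa_operands.  */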
291
292
/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */
300
301static inline void
302correct_use_link (use_operand_p ptr, tree stmt)
303{
304  use_operand_p prev;
305  tree root;
306
307  /*  Fold_stmt () may have changed the stmt pointers.  */
308  if (ptr->stmt != stmt)
309    ptr->stmt = stmt;
310
311  prev = ptr->prev;
312  if (prev)
313    {
314      /* Find the root element, making sure we skip any safe iterators.  */
315      while (prev->use != NULL || prev->stmt == NULL)
316	prev = prev->prev;
317
318      /* Get the ssa_name of the list the node is in.  */
319      root = prev->stmt;
320      /* If it's the right list, simply return.  */
321      if (root == *(ptr->use))
322	return;
323    }
  /* It's in the wrong list if we reach here.  */
325  delink_imm_use (ptr);
326  link_imm_use (ptr, *(ptr->use));
327}
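
/* To illustrate why this relinking is needed at all (the SSA names below
   are hypothetical): suppose a USE operand's pointer refers to
   TREE_OPERAND (stmt, 1), which used to contain b_3 and was therefore
   linked into b_3's immediate use list.  If a pass performs

	TREE_OPERAND (stmt, 1) = c_4;
	update_stmt (stmt);

   the use_operand_p address is unchanged, but the node now belongs on
   c_4's immediate use list; correct_use_link detects this and relinks it.  */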
328
329
330/* This routine makes sure that PTR is in an immediate use list, and makes
331   sure the stmt pointer is set to the current stmt.  Virtual uses do not need
332   the overhead of correct_use_link since they cannot be directly manipulated
333   like a real use can be.  (They don't exist in the TREE_OPERAND nodes.)  */
334static inline void
335set_virtual_use_link (use_operand_p ptr, tree stmt)
336{
337  /*  Fold_stmt () may have changed the stmt pointers.  */
338  if (ptr->stmt != stmt)
339    ptr->stmt = stmt;
340
341  /* If this use isn't in a list, add it to the correct list.  */
342  if (!ptr->prev)
343    link_imm_use (ptr, *(ptr->use));
344}
345
346
347
348#define FINALIZE_OPBUILD		build_defs
349#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
350							   build_defs, (I))
351#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
352							   build_defs, (I))
353#define FINALIZE_FUNC			finalize_ssa_def_ops
354#define FINALIZE_ALLOC			alloc_def
355#define FINALIZE_FREE			free_defs
356#define FINALIZE_TYPE			struct def_optype_d
357#define FINALIZE_ELEM(PTR)		((PTR)->def_ptr)
358#define FINALIZE_OPS			DEF_OPS
359#define FINALIZE_BASE(VAR)		VAR
360#define FINALIZE_BASE_TYPE		tree *
361#define FINALIZE_BASE_ZERO		NULL
362#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
363#include "tree-ssa-opfinalize.h"
364
365
366/* This routine will create stmt operands for STMT from the def build list.  */
367
368static void
369finalize_ssa_defs (tree stmt)
370{
371  unsigned int num = VEC_length (tree, build_defs);
372  /* There should only be a single real definition per assignment.  */
373  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
374
375  /* If there is an old list, often the new list is identical, or close, so
376     find the elements at the beginning that are the same as the vector.  */
377
378  finalize_ssa_def_ops (stmt);
379  VEC_truncate (tree, build_defs, 0);
380}
381
382#define FINALIZE_OPBUILD	build_uses
383#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
384							   build_uses, (I))
385#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
386							   build_uses, (I))
387#define FINALIZE_FUNC		finalize_ssa_use_ops
388#define FINALIZE_ALLOC		alloc_use
389#define FINALIZE_FREE		free_uses
390#define FINALIZE_TYPE		struct use_optype_d
391#define FINALIZE_ELEM(PTR)	((PTR)->use_ptr.use)
392#define FINALIZE_OPS		USE_OPS
393#define FINALIZE_USE_PTR(PTR)	USE_OP_PTR (PTR)
394#define FINALIZE_CORRECT_USE	correct_use_link
395#define FINALIZE_BASE(VAR)	VAR
396#define FINALIZE_BASE_TYPE	tree *
397#define FINALIZE_BASE_ZERO	NULL
398#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
399				(PTR)->use_ptr.use = (VAL);             \
400				link_imm_use_stmt (&((PTR)->use_ptr),   \
401						   *(VAL), (STMT))
402#include "tree-ssa-opfinalize.h"
403
/* Finalize the new USE operand vector for STMT.  */
405
406static void
407finalize_ssa_uses (tree stmt)
408{
409#ifdef ENABLE_CHECKING
410  {
411    unsigned x;
412    unsigned num = VEC_length (tree, build_uses);
413
414    /* If the pointer to the operand is the statement itself, something is
415       wrong.  It means that we are pointing to a local variable (the
416       initial call to get_stmt_operands does not pass a pointer to a
417       statement).  */
418    for (x = 0; x < num; x++)
419      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
420  }
421#endif
422  finalize_ssa_use_ops (stmt);
423  VEC_truncate (tree, build_uses, 0);
424}
425
426
/* Finalize the new V_MAY_DEF operand vector for STMT.  */
428#define FINALIZE_OPBUILD	build_v_may_defs
429#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_may_defs, (I))
430#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
431							build_v_may_defs, (I)))
432#define FINALIZE_FUNC		finalize_ssa_v_may_def_ops
433#define FINALIZE_ALLOC		alloc_maydef
434#define FINALIZE_FREE		free_maydefs
435#define FINALIZE_TYPE		struct maydef_optype_d
436#define FINALIZE_ELEM(PTR)	MAYDEF_RESULT (PTR)
437#define FINALIZE_OPS		MAYDEF_OPS
438#define FINALIZE_USE_PTR(PTR)	MAYDEF_OP_PTR (PTR)
439#define FINALIZE_CORRECT_USE	set_virtual_use_link
440#define FINALIZE_BASE_ZERO	0
441#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
442#define FINALIZE_BASE_TYPE	unsigned
443#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
444				(PTR)->def_var = (VAL);			\
445				(PTR)->use_var = (VAL);			\
446				(PTR)->use_ptr.use = &((PTR)->use_var);	\
447				link_imm_use_stmt (&((PTR)->use_ptr),	\
448						   (VAL), (STMT))
449#include "tree-ssa-opfinalize.h"
450
451
452static void
453finalize_ssa_v_may_defs (tree stmt)
454{
455  finalize_ssa_v_may_def_ops (stmt);
456}
457
458
459/* Clear the in_list bits and empty the build array for v_may_defs.  */
460
461static inline void
462cleanup_v_may_defs (void)
463{
464  unsigned x, num;
465  num = VEC_length (tree, build_v_may_defs);
466
467  for (x = 0; x < num; x++)
468    {
469      tree t = VEC_index (tree, build_v_may_defs, x);
470      if (TREE_CODE (t) != SSA_NAME)
471	{
472	  var_ann_t ann = var_ann (t);
473	  ann->in_v_may_def_list = 0;
474	}
475    }
476  VEC_truncate (tree, build_v_may_defs, 0);
477}
478
479
480#define FINALIZE_OPBUILD	build_vuses
481#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_vuses, (I))
482#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
483							build_vuses, (I)))
484#define FINALIZE_FUNC		finalize_ssa_vuse_ops
485#define FINALIZE_ALLOC		alloc_vuse
486#define FINALIZE_FREE		free_vuses
487#define FINALIZE_TYPE		struct vuse_optype_d
488#define FINALIZE_ELEM(PTR)	VUSE_OP (PTR)
489#define FINALIZE_OPS		VUSE_OPS
490#define FINALIZE_USE_PTR(PTR)	VUSE_OP_PTR (PTR)
491#define FINALIZE_CORRECT_USE	set_virtual_use_link
492#define FINALIZE_BASE_ZERO	0
493#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
494#define FINALIZE_BASE_TYPE	unsigned
495#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
496				(PTR)->use_var = (VAL);			\
497				(PTR)->use_ptr.use = &((PTR)->use_var);	\
498				link_imm_use_stmt (&((PTR)->use_ptr),	\
499						   (VAL), (STMT))
500#include "tree-ssa-opfinalize.h"
501
502
/* Finalize the new VUSE operand vector for STMT.  */
504
505static void
506finalize_ssa_vuses (tree stmt)
507{
508  unsigned num, num_v_may_defs;
509  unsigned vuse_index;
510
511  /* Remove superfluous VUSE operands.  If the statement already has a
512   V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
513   needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
514   suppose that variable 'a' is aliased:
515
516	      # VUSE <a_2>
517	      # a_3 = V_MAY_DEF <a_2>
518	      a = a + 1;
519
520  The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
521  operation.  */
522
523  num = VEC_length (tree, build_vuses);
524  num_v_may_defs = VEC_length (tree, build_v_may_defs);
525
526  if (num > 0 && num_v_may_defs > 0)
527    {
528      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
529        {
530	  tree vuse;
531	  vuse = VEC_index (tree, build_vuses, vuse_index);
532	  if (TREE_CODE (vuse) != SSA_NAME)
533	    {
534	      var_ann_t ann = var_ann (vuse);
535	      ann->in_vuse_list = 0;
536	      if (ann->in_v_may_def_list)
537	        {
538		  VEC_ordered_remove (tree, build_vuses, vuse_index);
539		  continue;
540		}
541	    }
542	  vuse_index++;
543	}
544    }
545  else
546    /* Clear out the in_list bits.  */
547    for (vuse_index = 0;
548	 vuse_index < VEC_length (tree, build_vuses);
549	 vuse_index++)
550      {
551	tree t = VEC_index (tree, build_vuses, vuse_index);
552	if (TREE_CODE (t) != SSA_NAME)
553	  {
554	    var_ann_t ann = var_ann (t);
555	    ann->in_vuse_list = 0;
556	  }
557      }
558
559  finalize_ssa_vuse_ops (stmt);
560  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
561  cleanup_v_may_defs ();
562
  /* Empty the VUSE build vector.  */
564  VEC_truncate (tree, build_vuses, 0);
565
566}
567
/* Finalize the new V_MUST_DEF operand vector for STMT.  */
569
570#define FINALIZE_OPBUILD	build_v_must_defs
571#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_must_defs, (I))
572#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
573							build_v_must_defs, (I)))
574#define FINALIZE_FUNC		finalize_ssa_v_must_def_ops
575#define FINALIZE_ALLOC		alloc_mustdef
576#define FINALIZE_FREE		free_mustdefs
577#define FINALIZE_TYPE		struct mustdef_optype_d
578#define FINALIZE_ELEM(PTR)	MUSTDEF_RESULT (PTR)
579#define FINALIZE_OPS		MUSTDEF_OPS
580#define FINALIZE_USE_PTR(PTR)	MUSTDEF_KILL_PTR (PTR)
581#define FINALIZE_CORRECT_USE	set_virtual_use_link
582#define FINALIZE_BASE_ZERO	0
583#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
584#define FINALIZE_BASE_TYPE	unsigned
585#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
586				(PTR)->def_var = (VAL);			\
587				(PTR)->kill_var = (VAL);		\
588				(PTR)->use_ptr.use = &((PTR)->kill_var);\
589				link_imm_use_stmt (&((PTR)->use_ptr),	\
590						   (VAL), (STMT))
591#include "tree-ssa-opfinalize.h"
592
593
594static void
595finalize_ssa_v_must_defs (tree stmt)
596{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     must-def, so we don't do it.  Suffice it to say, if you reach here
     without having subvars, and have more than one V_MUST_DEF, you have hit
     a bug.  */
602
603  finalize_ssa_v_must_def_ops (stmt);
604  VEC_truncate (tree, build_v_must_defs, 0);
605}
606
607
/* Finalize all the build vectors, filling the new operands into STMT's
   operand cache.  */
609
610static inline void
611finalize_ssa_stmt_operands (tree stmt)
612{
613  finalize_ssa_defs (stmt);
614  finalize_ssa_uses (stmt);
615  finalize_ssa_v_must_defs (stmt);
616  finalize_ssa_v_may_defs (stmt);
617  finalize_ssa_vuses (stmt);
618}
619
620
/* Start the process of building up a new set of operand vectors by
   verifying that all the build vectors are empty.  */
622
623static inline void
624start_ssa_stmt_operands (void)
625{
626  gcc_assert (VEC_length (tree, build_defs) == 0);
627  gcc_assert (VEC_length (tree, build_uses) == 0);
628  gcc_assert (VEC_length (tree, build_vuses) == 0);
629  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
630  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
631}
632
633
634/* Add DEF_P to the list of pointers to operands.  */
635
636static inline void
637append_def (tree *def_p)
638{
639  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
640}
641
642
643/* Add USE_P to the list of pointers to operands.  */
644
645static inline void
646append_use (tree *use_p)
647{
648  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
649}
650
651
652/* Add a new virtual may def for variable VAR to the build array.  */
653
654static inline void
655append_v_may_def (tree var)
656{
657  if (TREE_CODE (var) != SSA_NAME)
658    {
659      var_ann_t ann = get_var_ann (var);
660
661      /* Don't allow duplicate entries.  */
662      if (ann->in_v_may_def_list)
663	return;
664      ann->in_v_may_def_list = 1;
665    }
666
667  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
668}
669
670
671/* Add VAR to the list of virtual uses.  */
672
673static inline void
674append_vuse (tree var)
675{
676
677  /* Don't allow duplicate entries.  */
678  if (TREE_CODE (var) != SSA_NAME)
679    {
680      var_ann_t ann = get_var_ann (var);
681
682      if (ann->in_vuse_list || ann->in_v_may_def_list)
683        return;
684      ann->in_vuse_list = 1;
685    }
686
687  VEC_safe_push (tree, heap, build_vuses, (tree)var);
688}
689
690
/* Add VAR to the build list of virtual must definitions.  */
692
693static inline void
694append_v_must_def (tree var)
695{
696  unsigned i;
697
698  /* Don't allow duplicate entries.  */
699  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
700    if (var == VEC_index (tree, build_v_must_defs, i))
701      return;
702
703  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
704}
705
706
/* Parse STMT looking for operands.  When finished, the various build_*
   operand vectors will have potential operands in them.  */
710
711static void
712parse_ssa_operands (tree stmt)
713{
714  enum tree_code code;
715
716  code = TREE_CODE (stmt);
717  switch (code)
718    {
719    case MODIFY_EXPR:
720      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
721	 either only part of LHS is modified or if the RHS might throw,
722	 otherwise, use V_MUST_DEF.
723
724	 ??? If it might throw, we should represent somehow that it is killed
725	 on the fallthrough path.  */
726      {
727	tree lhs = TREE_OPERAND (stmt, 0);
728	int lhs_flags = opf_is_def;
729
730	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
731
732	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
733	   or not the entire LHS is modified; that depends on what's
734	   inside the VIEW_CONVERT_EXPR.  */
735	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
736	  lhs = TREE_OPERAND (lhs, 0);
737
738	if (TREE_CODE (lhs) != ARRAY_REF
739	    && TREE_CODE (lhs) != ARRAY_RANGE_REF
740	    && TREE_CODE (lhs) != BIT_FIELD_REF
741	    && TREE_CODE (lhs) != REALPART_EXPR
742	    && TREE_CODE (lhs) != IMAGPART_EXPR)
743	  lhs_flags |= opf_kill_def;
744
745        get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
746      }
747      break;
748
749    case COND_EXPR:
750      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
751      break;
752
753    case SWITCH_EXPR:
754      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
755      break;
756
757    case ASM_EXPR:
758      get_asm_expr_operands (stmt);
759      break;
760
761    case RETURN_EXPR:
762      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
763      break;
764
765    case GOTO_EXPR:
766      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
767      break;
768
769    case LABEL_EXPR:
770      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
771      break;
772
773      /* These nodes contain no variable references.  */
774    case BIND_EXPR:
775    case CASE_LABEL_EXPR:
776    case TRY_CATCH_EXPR:
777    case TRY_FINALLY_EXPR:
778    case EH_FILTER_EXPR:
779    case CATCH_EXPR:
780    case RESX_EXPR:
781      break;
782
783    default:
784      /* Notice that if get_expr_operands tries to use &STMT as the operand
785	 pointer (which may only happen for USE operands), we will fail in
786	 append_use.  This default will handle statements like empty
787	 statements, or CALL_EXPRs that may appear on the RHS of a statement
788	 or as statements themselves.  */
789      get_expr_operands (stmt, &stmt, opf_none);
790      break;
791    }
792}
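
/* As a small worked example (SSA version numbers are hypothetical), parsing
   the MODIFY_EXPR

	a_1 = b_2 + c_3;

   where a, b and c are GIMPLE registers appends &TREE_OPERAND (stmt, 0) to
   build_defs and the addresses of the two RHS operands to build_uses.  If
   the LHS were instead an aliased memory reference such as "*p_4 = b_2",
   the store would be recorded as virtual may-defs of whatever p_4 points
   to.  */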
793
/* Create an operands cache for STMT.  The statement annotation is obtained
   (and created if necessary), its flags are reset, and the operand cache is
   rebuilt from scratch by parsing the statement.

   Note that if STMT already had an operand cache, the finalization routines
   attempt to reuse the existing operand vectors whenever the new set of
   operands matches the old one, so unchanged operands keep their memory and
   their position in the immediate use lists.  */
815
816static void
817build_ssa_operands (tree stmt)
818{
819  stmt_ann_t ann = get_stmt_ann (stmt);
820
821  /* Initially assume that the statement has no volatile operands, nor
822     makes aliased loads or stores.  */
823  if (ann)
824    {
825      ann->has_volatile_ops = false;
826      ann->makes_aliased_stores = false;
827      ann->makes_aliased_loads = false;
828    }
829
830  start_ssa_stmt_operands ();
831
832  parse_ssa_operands (stmt);
833  operand_build_sort_virtual (build_vuses);
834  operand_build_sort_virtual (build_v_may_defs);
835  operand_build_sort_virtual (build_v_must_defs);
836
837  finalize_ssa_stmt_operands (stmt);
838}
839
840
841/* Free any operands vectors in OPS.  */
842void
843free_ssa_operands (stmt_operands_p ops)
844{
845  ops->def_ops = NULL;
846  ops->use_ops = NULL;
847  ops->maydef_ops = NULL;
848  ops->mustdef_ops = NULL;
849  ops->vuse_ops = NULL;
850}
851
852
/* Rebuild the operands of statement STMT.  Note that repeated calls for the
   same statement will do nothing until the statement is marked modified by
   a call to mark_stmt_modified ().  */
856
857void
858update_stmt_operands (tree stmt)
859{
860  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If this is called before SSA is initialized, don't do anything.  */
863  if (!ssa_operands_active ())
864    return;
865  /* The optimizers cannot handle statements that are nothing but a
866     _DECL.  This indicates a bug in the gimplifier.  */
867  gcc_assert (!SSA_VAR_P (stmt));
868
869  gcc_assert (ann->modified);
870
871  timevar_push (TV_TREE_OPS);
872
873  build_ssa_operands (stmt);
874
  /* Clear the modified bit for STMT.  Subsequent calls to
     update_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified ().  */
878  ann->modified = 0;
879
880  timevar_pop (TV_TREE_OPS);
881}
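
/* A minimal usage sketch: passes normally do not call update_stmt_operands
   directly.  After editing a statement in place they call update_stmt ()
   (see tree-flow-inline.h), which marks the statement modified and then
   comes through here, e.g.:

	TREE_OPERAND (stmt, 1) = new_rhs;
	update_stmt (stmt);

   where new_rhs is whatever replacement expression the pass built.  */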
882
883
/* Copy the virtual operands from SRC to DEST.  */
885
886void
887copy_virtual_operands (tree dest, tree src)
888{
889  tree t;
890  ssa_op_iter iter, old_iter;
891  use_operand_p use_p, u2;
892  def_operand_p def_p, d2;
893
894  build_ssa_operands (dest);
895
896  /* Copy all the virtual fields.  */
897  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
898    append_vuse (t);
899  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
900    append_v_may_def (t);
901  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
902    append_v_must_def (t);
903
904  if (VEC_length (tree, build_vuses) == 0
905      && VEC_length (tree, build_v_may_defs) == 0
906      && VEC_length (tree, build_v_must_defs) == 0)
907    return;
908
909  /* Now commit the virtual operands to this stmt.  */
910  finalize_ssa_v_must_defs (dest);
911  finalize_ssa_v_may_defs (dest);
912  finalize_ssa_vuses (dest);
913
  /* Finally, set the fields to the same values as the originals.  */
915
916
917  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
918  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
919    {
920      gcc_assert (!op_iter_done (&old_iter));
921      SET_USE (use_p, t);
922      t = op_iter_next_tree (&old_iter);
923    }
924  gcc_assert (op_iter_done (&old_iter));
925
926  op_iter_init_maydef (&old_iter, src, &u2, &d2);
927  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
928    {
929      gcc_assert (!op_iter_done (&old_iter));
930      SET_USE (use_p, USE_FROM_PTR (u2));
931      SET_DEF (def_p, DEF_FROM_PTR (d2));
932      op_iter_next_maymustdef (&u2, &d2, &old_iter);
933    }
934  gcc_assert (op_iter_done (&old_iter));
935
936  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
937  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
938    {
939      gcc_assert (!op_iter_done (&old_iter));
940      SET_USE (use_p, USE_FROM_PTR (u2));
941      SET_DEF (def_p, DEF_FROM_PTR (d2));
942      op_iter_next_maymustdef (&u2, &d2, &old_iter);
943    }
944  gcc_assert (op_iter_done (&old_iter));
945
946}
947
948
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this can
   be used to eliminate redundant loads.  OLD_STMT is the store stmt, and
   NEW_STMT is the new load which represents a load of the values stored.  */
954
955void
956create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
957{
958  stmt_ann_t ann;
959  tree op;
960  ssa_op_iter iter;
961  use_operand_p use_p;
962  unsigned x;
963
964  ann = get_stmt_ann (new_stmt);
965
  /* Process the stmt looking for operands.  */
967  start_ssa_stmt_operands ();
968  parse_ssa_operands (new_stmt);
969
970  for (x = 0; x < VEC_length (tree, build_vuses); x++)
971    {
972      tree t = VEC_index (tree, build_vuses, x);
973      if (TREE_CODE (t) != SSA_NAME)
974	{
975	  var_ann_t ann = var_ann (t);
976	  ann->in_vuse_list = 0;
977	}
978    }
979
980  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
981    {
982      tree t = VEC_index (tree, build_v_may_defs, x);
983      if (TREE_CODE (t) != SSA_NAME)
984	{
985	  var_ann_t ann = var_ann (t);
986	  ann->in_v_may_def_list = 0;
987	}
988    }
989  /* Remove any virtual operands that were found.  */
990  VEC_truncate (tree, build_v_may_defs, 0);
991  VEC_truncate (tree, build_v_must_defs, 0);
992  VEC_truncate (tree, build_vuses, 0);
993
994  /* For each VDEF on the original statement, we want to create a
995     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
996     statement.  */
997  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
998			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
999    append_vuse (op);
1000
1001  /* Now build the operands for this new stmt.  */
1002  finalize_ssa_stmt_operands (new_stmt);
1003
1004  /* All uses in this fake stmt must not be in the immediate use lists.  */
1005  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
1006    delink_imm_use (use_p);
1007}
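
/* For example (all SSA names hypothetical), given the store

	# a_4 = V_MAY_DEF <a_3>
	*p_1 = x_2;

   DOM can build an artificial load statement whose operands are computed by
   this routine: the real operands come from parsing the new statement, and
   a VUSE <a_4> is added for the VDEF result of the store, so a later load
   through p_1 that sees the same VUSE can be recognized as redundant.  */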
1008
1009void
1010swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
1011{
1012  tree op0, op1;
1013  op0 = *exp0;
1014  op1 = *exp1;
1015
1016  /* If the operand cache is active, attempt to preserve the relative positions
1017     of these two operands in their respective immediate use lists.  */
1018  if (ssa_operands_active () && op0 != op1)
1019    {
1020      use_optype_p use0, use1, ptr;
1021      use0 = use1 = NULL;
1022      /* Find the 2 operands in the cache, if they are there.  */
1023      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1024	if (USE_OP_PTR (ptr)->use == exp0)
1025	  {
1026	    use0 = ptr;
1027	    break;
1028	  }
1029      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1030	if (USE_OP_PTR (ptr)->use == exp1)
1031	  {
1032	    use1 = ptr;
1033	    break;
1034	  }
      /* If we cannot find both uses in the cache, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
1037      if (use0 && use1)
1038        {
1039	  tree *tmp = USE_OP_PTR (use1)->use;
1040	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1041	  USE_OP_PTR (use0)->use = tmp;
1042	}
1043    }
1044
1045  /* Now swap the data.  */
1046  *exp0 = op1;
1047  *exp1 = op0;
1048}
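
/* For instance (hypothetical SSA names), canonicalizing the comparison

	if (5 > a_2)		into	if (a_2 < 5)

   in get_expr_operands calls swap_tree_comparison to flip the tree code and
   then this routine to exchange the two operand slots; swapping the cached
   use pointers as well keeps a_2's entry at its existing position in a_2's
   immediate use list instead of being unlinked and relinked.  */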
1049
1050
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is a bitwise combination of the OPF_* constants modifying how to
   interpret the operands found.  */
1054
1055static void
1056get_expr_operands (tree stmt, tree *expr_p, int flags)
1057{
1058  enum tree_code code;
1059  enum tree_code_class class;
1060  tree expr = *expr_p;
1061  stmt_ann_t s_ann = stmt_ann (stmt);
1062
1063  if (expr == NULL)
1064    return;
1065
1066  code = TREE_CODE (expr);
1067  class = TREE_CODE_CLASS (code);
1068
1069  switch (code)
1070    {
1071    case ADDR_EXPR:
1072      /* We could have the address of a component, array member,
1073	 etc which has interesting variable references.  */
1074      /* Taking the address of a variable does not represent a
1075	 reference to it, but the fact that the stmt takes its address will be
1076	 of interest to some passes (e.g. alias resolution).  */
1077      add_stmt_operand (expr_p, s_ann, 0);
1078
1079      /* If the address is invariant, there may be no interesting variable
1080	 references inside.  */
1081      if (is_gimple_min_invariant (expr))
1082	return;
1083
1084      /* There should be no VUSEs created, since the referenced objects are
1085	 not really accessed.  The only operands that we should find here
1086	 are ARRAY_REF indices which will always be real operands (GIMPLE
1087	 does not allow non-registers as array indices).  */
1088      flags |= opf_no_vops;
1089
1090      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1091      return;
1092
1093    case SSA_NAME:
1094    case VAR_DECL:
1095    case PARM_DECL:
1096    case RESULT_DECL:
1097    case CONST_DECL:
1098      {
1099	subvar_t svars;
1100
1101	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
1102	   Otherwise, add the variable itself.
1103	   Whether it goes to USES or DEFS depends on the operand flags.  */
1104	if (var_can_have_subvars (expr)
1105	    && (svars = get_subvars_for_var (expr)))
1106	  {
1107	    subvar_t sv;
1108	    for (sv = svars; sv; sv = sv->next)
1109	      add_stmt_operand (&sv->var, s_ann, flags);
1110	  }
1111	else
1112	  {
1113	    add_stmt_operand (expr_p, s_ann, flags);
1114	  }
1115	return;
1116      }
1117    case MISALIGNED_INDIRECT_REF:
1118      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1119      /* fall through */
1120
1121    case ALIGN_INDIRECT_REF:
1122    case INDIRECT_REF:
1123      get_indirect_ref_operands (stmt, expr, flags);
1124      return;
1125
1126    case TARGET_MEM_REF:
1127      get_tmr_operands (stmt, expr, flags);
1128      return;
1129
1130    case ARRAY_REF:
1131    case ARRAY_RANGE_REF:
1132      /* Treat array references as references to the virtual variable
1133	 representing the array.  The virtual variable for an ARRAY_REF
1134	 is the VAR_DECL for the array.  */
1135
      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to FLAGS.  Recurse if the base of the ARRAY_REF node is
	 not a regular variable.  */
1139      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1140	add_stmt_operand (expr_p, s_ann, flags);
1141      else
1142	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1143
1144      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1145      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1146      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1147      return;
1148
1149    case COMPONENT_REF:
1150    case REALPART_EXPR:
1151    case IMAGPART_EXPR:
1152      {
1153	tree ref;
1154	unsigned HOST_WIDE_INT offset, size;
1155 	/* This component ref becomes an access to all of the subvariables
1156	   it can touch,  if we can determine that, but *NOT* the real one.
1157	   If we can't determine which fields we could touch, the recursion
1158	   will eventually get to a variable and add *all* of its subvars, or
1159	   whatever is the minimum correct subset.  */
1160
1161	ref = okay_component_ref_for_subvars (expr, &offset, &size);
1162	if (ref)
1163	  {
1164	    subvar_t svars = get_subvars_for_var (ref);
1165	    subvar_t sv;
1166	    for (sv = svars; sv; sv = sv->next)
1167	      {
1168		bool exact;
1169		if (overlap_subvar (offset, size, sv, &exact))
1170		  {
1171	            int subvar_flags = flags;
1172		    if (!exact)
1173		      subvar_flags &= ~opf_kill_def;
1174		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
1175		  }
1176	      }
1177	  }
1178	else
1179	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1180			     flags & ~opf_kill_def);
1181
1182	if (code == COMPONENT_REF)
1183	  {
1184	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1185	      s_ann->has_volatile_ops = true;
1186	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1187	  }
1188	return;
1189      }
1190    case WITH_SIZE_EXPR:
1191      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1192	 and an rvalue reference to its second argument.  */
1193      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1194      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1195      return;
1196
1197    case CALL_EXPR:
1198      get_call_expr_operands (stmt, expr);
1199      return;
1200
1201    case COND_EXPR:
1202    case VEC_COND_EXPR:
1203      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1204      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1205      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1206      return;
1207
1208    case MODIFY_EXPR:
1209      {
1210	int subflags;
1211	tree op;
1212
1213	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1214
1215	op = TREE_OPERAND (expr, 0);
1216	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  /* Look through the WITH_SIZE_EXPR to the wrapped lvalue.  */
	  op = TREE_OPERAND (op, 0);
1218	if (TREE_CODE (op) == ARRAY_REF
1219	    || TREE_CODE (op) == ARRAY_RANGE_REF
1220	    || TREE_CODE (op) == REALPART_EXPR
1221	    || TREE_CODE (op) == IMAGPART_EXPR)
1222	  subflags = opf_is_def;
1223	else
1224	  subflags = opf_is_def | opf_kill_def;
1225
1226	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1227	return;
1228      }
1229
1230    case CONSTRUCTOR:
1231      {
1232	/* General aggregate CONSTRUCTORs have been decomposed, but they
1233	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
1234	constructor_elt *ce;
1235	unsigned HOST_WIDE_INT idx;
1236
1237	for (idx = 0;
1238	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1239	     idx++)
1240	  get_expr_operands (stmt, &ce->value, opf_none);
1241
1242	return;
1243      }
1244
1245    case TRUTH_NOT_EXPR:
1246    case BIT_FIELD_REF:
1247    case VIEW_CONVERT_EXPR:
1248    do_unary:
1249      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1250      return;
1251
1252    case TRUTH_AND_EXPR:
1253    case TRUTH_OR_EXPR:
1254    case TRUTH_XOR_EXPR:
1255    case COMPOUND_EXPR:
1256    case OBJ_TYPE_REF:
1257    case ASSERT_EXPR:
1258    do_binary:
1259      {
1260	tree op0 = TREE_OPERAND (expr, 0);
1261	tree op1 = TREE_OPERAND (expr, 1);
1262
1263	/* If it would be profitable to swap the operands, then do so to
1264	   canonicalize the statement, enabling better optimization.
1265
1266	   By placing canonicalization of such expressions here we
1267	   transparently keep statements in canonical form, even
1268	   when the statement is modified.  */
1269	if (tree_swap_operands_p (op0, op1, false))
1270	  {
1271	    /* For relationals we need to swap the operands
1272	       and change the code.  */
1273	    if (code == LT_EXPR
1274		|| code == GT_EXPR
1275		|| code == LE_EXPR
1276		|| code == GE_EXPR)
1277	      {
1278		TREE_SET_CODE (expr, swap_tree_comparison (code));
1279		swap_tree_operands (stmt,
1280				    &TREE_OPERAND (expr, 0),
1281				    &TREE_OPERAND (expr, 1));
1282	      }
1283
1284	    /* For a commutative operator we can just swap the operands.  */
1285	    else if (commutative_tree_code (code))
1286	      {
1287		swap_tree_operands (stmt,
1288				    &TREE_OPERAND (expr, 0),
1289				    &TREE_OPERAND (expr, 1));
1290	      }
1291	  }
1292
1293	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1294	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1295	return;
1296      }
1297
1298    case REALIGN_LOAD_EXPR:
1299      {
1300	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1301        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1302        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1303        return;
1304      }
1305
1306    case BLOCK:
1307    case FUNCTION_DECL:
1308    case EXC_PTR_EXPR:
1309    case FILTER_EXPR:
1310    case LABEL_DECL:
1311      /* Expressions that make no memory references.  */
1312      return;
1313
1314    default:
1315      if (class == tcc_unary)
1316	goto do_unary;
1317      if (class == tcc_binary || class == tcc_comparison)
1318	goto do_binary;
1319      if (class == tcc_constant || class == tcc_type)
1320	return;
1321    }
1322
1323  /* If we get here, something has gone wrong.  */
1324#ifdef ENABLE_CHECKING
1325  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1326  debug_tree (expr);
1327  fputs ("\n", stderr);
1328  internal_error ("internal error");
1329#endif
1330  gcc_unreachable ();
1331}
1332
1333
/* Scan the operands of the ASM_EXPR statement STMT.  */
1335
1336static void
1337get_asm_expr_operands (tree stmt)
1338{
1339  stmt_ann_t s_ann = stmt_ann (stmt);
1340  int noutputs = list_length (ASM_OUTPUTS (stmt));
1341  const char **oconstraints
1342    = (const char **) alloca ((noutputs) * sizeof (const char *));
1343  int i;
1344  tree link;
1345  const char *constraint;
1346  bool allows_mem, allows_reg, is_inout;
1347
  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1349    {
1350      oconstraints[i] = constraint
1351	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1352      parse_output_constraint (&constraint, i, 0, 0,
1353	  &allows_mem, &allows_reg, &is_inout);
1354
1355      /* This should have been split in gimplify_asm_expr.  */
1356      gcc_assert (!allows_reg || !is_inout);
1357
1358      /* Memory operands are addressable.  Note that STMT needs the
1359	 address of this operand.  */
1360      if (!allows_reg && allows_mem)
1361	{
1362	  tree t = get_base_address (TREE_VALUE (link));
1363	  if (t && DECL_P (t) && s_ann)
1364	    add_to_addressable_set (t, &s_ann->addresses_taken);
1365	}
1366
1367      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1368    }
1369
1370  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1371    {
1372      constraint
1373	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1374      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1375	  oconstraints, &allows_mem, &allows_reg);
1376
1377      /* Memory operands are addressable.  Note that STMT needs the
1378	 address of this operand.  */
1379      if (!allows_reg && allows_mem)
1380	{
1381	  tree t = get_base_address (TREE_VALUE (link));
1382	  if (t && DECL_P (t) && s_ann)
1383	    add_to_addressable_set (t, &s_ann->addresses_taken);
1384	}
1385
1386      get_expr_operands (stmt, &TREE_VALUE (link), 0);
1387    }
1388
1389
1390  /* Clobber memory for asm ("" : : : "memory");  */
1391  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1392    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1393      {
1394	unsigned i;
1395	bitmap_iterator bi;
1396
1397	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1398	   decided to group them).  */
1399	if (global_var)
1400	  add_stmt_operand (&global_var, s_ann, opf_is_def);
1401	else
1402	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1403	    {
1404	      tree var = referenced_var (i);
1405	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1406	    }
1407
1408	/* Now clobber all addressables.  */
1409	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1410	    {
1411	      tree var = referenced_var (i);
1412
1413	      /* Subvars are explicitly represented in this list, so
1414		 we don't need the original to be added to the clobber
1415		 ops, but the original *will* be in this list because
1416		 we keep the addressability of the original
1417		 variable up-to-date so we don't screw up the rest of
1418		 the backend.  */
1419	      if (var_can_have_subvars (var)
1420		  && get_subvars_for_var (var) != NULL)
1421		continue;
1422
1423	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1424	    }
1425
1426	break;
1427      }
1428}
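
/* For example, an asm with a "memory" clobber such as

	__asm__ __volatile__ ("" : : : "memory");

   gets a V_MAY_DEF for every call-clobbered variable (or for .GLOBAL_VAR if
   variables were grouped) plus every addressable variable, which is what
   forces memory to be treated as unknown across the asm.  */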
1429
1430/* A subroutine of get_expr_operands to handle INDIRECT_REF,
1431   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */
1432
1433static void
1434get_indirect_ref_operands (tree stmt, tree expr, int flags)
1435{
1436  tree *pptr = &TREE_OPERAND (expr, 0);
1437  tree ptr = *pptr;
1438  stmt_ann_t s_ann = stmt_ann (stmt);
1439
1440  /* Stores into INDIRECT_REF operands are never killing definitions.  */
1441  flags &= ~opf_kill_def;
1442
1443  if (SSA_VAR_P (ptr))
1444    {
1445      struct ptr_info_def *pi = NULL;
1446
1447      /* If PTR has flow-sensitive points-to information, use it.  */
1448      if (TREE_CODE (ptr) == SSA_NAME
1449	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1450	  && pi->name_mem_tag)
1451	{
1452	  /* PTR has its own memory tag.  Use it.  */
1453	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1454	}
1455      else
1456	{
1457	  /* If PTR is not an SSA_NAME or it doesn't have a name
1458	     tag, use its type memory tag.  */
1459	  var_ann_t v_ann;
1460
1461	  /* If we are emitting debugging dumps, display a warning if
1462	     PTR is an SSA_NAME with no flow-sensitive alias
1463	     information.  That means that we may need to compute
1464	     aliasing again.  */
1465	  if (dump_file
1466	      && TREE_CODE (ptr) == SSA_NAME
1467	      && pi == NULL)
1468	    {
1469	      fprintf (dump_file,
1470		  "NOTE: no flow-sensitive alias info for ");
1471	      print_generic_expr (dump_file, ptr, dump_flags);
1472	      fprintf (dump_file, " in ");
1473	      print_generic_stmt (dump_file, stmt, dump_flags);
1474	    }
1475
1476	  if (TREE_CODE (ptr) == SSA_NAME)
1477	    ptr = SSA_NAME_VAR (ptr);
1478	  v_ann = var_ann (ptr);
1479	  if (v_ann->type_mem_tag)
1480	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1481	}
1482    }
1483
1484  /* If a constant is used as a pointer, we can't generate a real
1485     operand for it but we mark the statement volatile to prevent
1486     optimizations from messing things up.  */
1487  else if (TREE_CODE (ptr) == INTEGER_CST)
1488    {
1489      if (s_ann)
1490	s_ann->has_volatile_ops = true;
1491      return;
1492    }
1493
  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we are at finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
1499  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1500	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1501	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1502    {
1503      /* Make sure we know the object is addressable.  */
1504      pptr = &TREE_OPERAND (ptr, 0);
1505      add_stmt_operand (pptr, s_ann, 0);
1506
1507      /* Mark the object itself with a VUSE.  */
1508      pptr = &TREE_OPERAND (*pptr, 0);
1509      get_expr_operands (stmt, pptr, flags);
1510      return;
1511    }
1512
  /* OK, this isn't even is_gimple_min_invariant.  Something's broken.  */
1514  else
1515    gcc_unreachable ();
1516
1517  /* Add a USE operand for the base pointer.  */
1518  get_expr_operands (stmt, pptr, opf_none);
1519}
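
/* For example (tag names are hypothetical), a dereference such as

	*p_1 = ...;

   where p_1 has flow-sensitive points-to information adds a V_MAY_DEF of
   p_1's name memory tag (say NMT.5); if no name tag is available, the type
   memory tag of the pointer's base variable is used instead.  Either way
   the kill flag has already been cleared, so the store is never treated as
   a killing definition.  */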
1520
1521/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
1522
1523static void
1524get_tmr_operands (tree stmt, tree expr, int flags)
1525{
1526  tree tag = TMR_TAG (expr), ref;
1527  unsigned HOST_WIDE_INT offset, size;
1528  subvar_t svars, sv;
1529  stmt_ann_t s_ann = stmt_ann (stmt);
1530
1531  /* First record the real operands.  */
1532  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1533  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1534
1535  /* MEM_REFs should never be killing.  */
1536  flags &= ~opf_kill_def;
1537
1538  if (TMR_SYMBOL (expr))
1539    {
1540      stmt_ann_t ann = stmt_ann (stmt);
1541      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1542    }
1543
1544  if (!tag)
1545    {
1546      /* Something weird, so ensure that we will be careful.  */
1547      stmt_ann (stmt)->has_volatile_ops = true;
1548      return;
1549    }
1550
1551  if (DECL_P (tag))
1552    {
1553      get_expr_operands (stmt, &tag, flags);
1554      return;
1555    }
1556
1557  ref = okay_component_ref_for_subvars (tag, &offset, &size);
1558  gcc_assert (ref != NULL_TREE);
1559  svars = get_subvars_for_var (ref);
1560  for (sv = svars; sv; sv = sv->next)
1561    {
1562      bool exact;
1563      if (overlap_subvar (offset, size, sv, &exact))
1564	{
1565	  int subvar_flags = flags;
1566	  if (!exact)
1567	    subvar_flags &= ~opf_kill_def;
1568	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
1569	}
1570    }
1571}
1572
1573/* A subroutine of get_expr_operands to handle CALL_EXPR.  */
1574
1575static void
1576get_call_expr_operands (tree stmt, tree expr)
1577{
1578  tree op;
1579  int call_flags = call_expr_flags (expr);
1580
1581  /* If aliases have been computed already, add V_MAY_DEF or V_USE
1582     operands for all the symbols that have been found to be
1583     call-clobbered.
1584
1585     Note that if aliases have not been computed, the global effects
1586     of calls will not be included in the SSA web. This is fine
1587     because no optimizer should run before aliases have been
1588     computed.  By not bothering with virtual operands for CALL_EXPRs
1589     we avoid adding superfluous virtual operands, which can be a
1590     significant compile time sink (See PR 15855).  */
1591  if (aliases_computed_p
1592      && !bitmap_empty_p (call_clobbered_vars)
1593      && !(call_flags & ECF_NOVOPS))
1594    {
1595      /* A 'pure' or a 'const' function never call-clobbers anything.
1596	 A 'noreturn' function might, but since we don't return anyway
1597	 there is no point in recording that.  */
1598      if (TREE_SIDE_EFFECTS (expr)
1599	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1600	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1601      else if (!(call_flags & ECF_CONST))
1602	add_call_read_ops (stmt);
1603    }
1604
1605  /* Find uses in the called function.  */
1606  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1607
1608  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1609    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1610
1611  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1612
1613}
1614
1615
/* Add *VAR_P to the appropriate operand array for the statement with
   annotation S_ANN.  FLAGS is as in get_expr_operands.  If *VAR_P is a
   GIMPLE register, it will be added to the statement's real operands,
   otherwise it is added to virtual operands.  */
1620
1621static void
1622add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1623{
1624  bool is_real_op;
1625  tree var, sym;
1626  var_ann_t v_ann;
1627
1628  var = *var_p;
1629  STRIP_NOPS (var);
1630
1631  /* If the operand is an ADDR_EXPR, add its operand to the list of
1632     variables that have had their address taken in this statement.  */
1633  if (TREE_CODE (var) == ADDR_EXPR && s_ann)
1634    {
1635      add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
1636      return;
1637    }
1638
1639  /* If the original variable is not a scalar, it will be added to the list
1640     of virtual operands.  In that case, use its base symbol as the virtual
1641     variable representing it.  */
1642  is_real_op = is_gimple_reg (var);
1643  if (!is_real_op && !DECL_P (var))
1644    var = get_virtual_var (var);
1645
1646  /* If VAR is not a variable that we care to optimize, do nothing.  */
1647  if (var == NULL_TREE || !SSA_VAR_P (var))
1648    return;
1649
1650  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1651  v_ann = var_ann (sym);
1652
1653  /* Mark statements with volatile operands.  Optimizers should back
1654     off from statements having volatile operands.  */
1655  if (TREE_THIS_VOLATILE (sym) && s_ann)
1656    s_ann->has_volatile_ops = true;
1657
1658  /* If the variable cannot be modified and this is a V_MAY_DEF change
1659     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     make this change for non-specific stores.
1662
1663     Note that if this is a specific store, i.e. associated with a
1664     modify_expr, then we can't suppress the V_DEF, lest we run into
1665     validation problems.
1666
1667     This can happen when programs cast away const, leaving us with a
1668     store to read-only memory.  If the statement is actually executed
1669     at runtime, then the program is ill formed.  If the statement is
1670     not executed then all is well.  At the very least, we cannot ICE.  */
1671  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1672    {
1673      gcc_assert (!is_real_op);
1674      flags &= ~(opf_is_def | opf_kill_def);
1675    }
1676
1677  if (is_real_op)
1678    {
1679      /* The variable is a GIMPLE register.  Add it to real operands.  */
1680      if (flags & opf_is_def)
1681	append_def (var_p);
1682      else
1683	append_use (var_p);
1684    }
1685  else
1686    {
1687      varray_type aliases;
1688
1689      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
1690	 virtual operands, unless the caller has specifically requested
1691	 not to add virtual operands (used when adding operands inside an
1692	 ADDR_EXPR expression).  */
1693      if (flags & opf_no_vops)
1694	return;
1695
1696      aliases = v_ann->may_aliases;
1697
1698      if (aliases == NULL)
1699	{
1700	  /* The variable is not aliased or it is an alias tag.  */
1701	  if (flags & opf_is_def)
1702	    {
1703	      if (flags & opf_kill_def)
1704		{
1705		  /* Only regular variables or struct fields may get a
1706		     V_MUST_DEF operand.  */
1707		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
1708			      || v_ann->mem_tag_kind == STRUCT_FIELD);
1709		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
1710		    variable definitions.  */
1711		  append_v_must_def (var);
1712		}
1713	      else
1714		{
1715		  /* Add a V_MAY_DEF for call-clobbered variables and
1716		     memory tags.  */
1717		  append_v_may_def (var);
1718		}
1719	    }
1720	  else
1721	    {
1722	      append_vuse (var);
1723	      if (s_ann && v_ann->is_alias_tag)
1724		s_ann->makes_aliased_loads = 1;
1725	    }
1726	}
1727      else
1728	{
1729	  size_t i;
1730
1731	  /* The variable is aliased.  Add its aliases to the virtual
1732	     operands.  */
1733	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1734
1735	  if (flags & opf_is_def)
1736	    {
1737	      /* If the variable is also an alias tag, add a virtual
1738		 operand for it, otherwise we will miss representing
1739		 references to the members of the variable's alias set.
1740		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
1741	      if (v_ann->is_alias_tag)
1742		append_v_may_def (var);
1743
1744	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1745		append_v_may_def (VARRAY_TREE (aliases, i));
1746
1747	      if (s_ann)
1748		s_ann->makes_aliased_stores = 1;
1749	    }
1750	  else
1751	    {
1752	      /* Similarly, append a virtual uses for VAR itself, when
1753		 it is an alias tag.  */
1754	      if (v_ann->is_alias_tag)
1755		append_vuse (var);
1756
1757	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1758		append_vuse (VARRAY_TREE (aliases, i));
1759
1760	      if (s_ann)
1761		s_ann->makes_aliased_loads = 1;
1762	    }
1763	}
1764    }
1765}


/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
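  /* For instance, given 'struct S { int a; int b; } s;', taking '&s.a'
     must mark 's' (and, below, all of its sub-variables) addressable,
     since pointer arithmetic starting from '&s.a' may legitimately reach
     's.b'.  This example is illustrative only; see the PR above for the
     original discussion.  */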
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}


/* Add clobbering definitions for .GLOBAL_VAR or for each of the
   call-clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;
  bitmap not_read_b, not_written_b;

  /* A call that is neither const, pure nor noreturn may clobber any of
     the call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* FIXME - if we have better information from the static vars
     analysis, we need to make the cache call-site specific.  That way
     we keep the performance benefit even when doing good
     optimization.  */

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* If the cache is valid and the callee-specific bitmaps are empty (so
     the cached, unfiltered answer applies), copy the cached elements
     into the build vectors.  */
  if (ssa_call_clobbered_cache_valid
      && (!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      for (u = 0; u < VEC_length (tree, clobbered_vuses); u++)
        {
          t = VEC_index (tree, clobbered_vuses, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VEC_safe_push (tree, heap, build_vuses, (tree)t);
        }
      for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
        {
          t = VEC_index (tree, clobbered_v_may_defs, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_v_may_def_list = 1;
          VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
        }
      if (s_ann)
        {
          s_ann->makes_aliased_loads = clobbered_aliased_loads;
          s_ann->makes_aliased_stores = clobbered_aliased_stores;
        }
      return;
    }

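  /* Collect the operands through a dummy annotation: the operands
     themselves go into the global build_vuses and build_v_may_defs
     vectors, while EMPTY_ANN only captures the makes_aliased_loads and
     makes_aliased_stores flags so that they can be copied into the cache
     and into the statement's annotation below.  */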
  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF or VUSE operand, as appropriate, for every
     call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      if (unmodifiable_var_p (var))
        add_stmt_operand (&var, &empty_ann, opf_none);
      else
        {
          bool not_read
            = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
          bool not_written
            = not_written_b ? bitmap_bit_p (not_written_b, u) : false;

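          /* Variables the callee cannot modify (read-only statics and
             externals, or those the IPA summary says are not written)
             only need a VUSE; skip even that if the callee is also known
             not to read them.  */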
          if ((TREE_READONLY (var)
               && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
              || not_written)
            {
              if (!not_read)
                add_stmt_operand (&var, &empty_ann, opf_none);
            }
          else
            add_stmt_operand (&var, &empty_ann, opf_is_def);
        }
    }

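  /* Only (re)fill the cache when the IPA bitmaps were empty.  A set of
     operands filtered for a particular callee would be wrong for other
     call sites, so it must not be cached.  */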
  if ((!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      clobbered_aliased_loads = empty_ann.makes_aliased_loads;
      clobbered_aliased_stores = empty_ann.makes_aliased_stores;

      /* Set the flags for the stmt's annotation.  */
      if (s_ann)
        {
          s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
          s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
        }

      /* Prepare empty cache vectors.  */
      VEC_truncate (tree, clobbered_vuses, 0);
      VEC_truncate (tree, clobbered_v_may_defs, 0);

      /* Now fill the clobbered cache with the values that have been found.  */
      for (u = 0; u < VEC_length (tree, build_vuses); u++)
        VEC_safe_push (tree, heap, clobbered_vuses,
                       VEC_index (tree, build_vuses, u));

      gcc_assert (VEC_length (tree, build_vuses)
                  == VEC_length (tree, clobbered_vuses));

      for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
        VEC_safe_push (tree, heap, clobbered_v_may_defs,
                       VEC_index (tree, build_v_may_defs, u));

      gcc_assert (VEC_length (tree, build_v_may_defs)
                  == VEC_length (tree, clobbered_v_may_defs));

      ssa_call_clobbered_cache_valid = true;
    }
}


/* Add VUSE operands for .GLOBAL_VAR or for all call-clobbered variables
   in the function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If the cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
        {
          t = VEC_index (tree, ro_call_vuses, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VEC_safe_push (tree, heap, build_vuses, (tree)t);
        }
      if (s_ann)
        s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

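  /* As in add_call_clobber_ops, use a dummy annotation: the VUSEs go into
     the global build_vuses vector and EMPTY_ANN only captures the
     makes_aliased_loads flag.  */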
  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare an empty cache vector.  */
  VEC_truncate (tree, ro_call_vuses, 0);

  /* Now fill the read-only call cache with the values that have been found.  */
  for (u = 0; u < VEC_length (tree, build_vuses); u++)
    VEC_safe_push (tree, heap, ro_call_vuses,
                   VEC_index (tree, build_vuses, u));

  gcc_assert (VEC_length (tree, build_vuses)
              == VEC_length (tree, ro_call_vuses));

  ssa_ro_call_cache_valid = true;
}


/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

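  /* The root node LIST is the only node whose use pointer is NULL.  An
     SSA_NAME with no uses may simply have NULL prev and next pointers;
     otherwise the list is circular and the root acts as a sentinel.  */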
  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
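  /* Walk the list forward, checking that each node's prev pointer agrees
     with the node we just came from and that every use points back to
     VAR.  */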
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* Two list roots, or a safe guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;
      /* Avoid infinite loops.  50,000,000 uses probably indicates a problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify the list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

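  /* The backward walk decremented COUNT once per node; it returns to zero
     only if both directions visited the same number of nodes.  */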
  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
#include "gt-tree-ssa-operands.h"