/* Finding reachable regions and values.
   Copyright (C) 2020-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "tristate.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "digraph.h"
#include "json.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/region-model-reachability.h"

#if ENABLE_ANALYZER

namespace ana {

reachable_regions::reachable_regions (region_model *model)
: m_model (model), m_store (model->get_store ()),
  m_reachable_base_regs (), m_mutable_base_regs ()
{
}
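
/* A rough sketch of how this class is expected to be driven, for example
   by region_model::handle_unrecognized_call when handling a call to an
   unknown function.  The calls below are illustrative rather than a
   verbatim copy of that code:

     reachable_regions reachable_regs (model);

     // Add globals and clusters that already escaped in previous
     // unknown calls.
     store->for_each_cluster (reachable_regions::init_cluster_cb,
                              &reachable_regs);

     // Add the value of each pointer-like argument of the call, using
     // its declared type to determine mutability.
     reachable_regs.handle_parm (arg_sval, arg_type);

     // Mark everything found to be mutable as having escaped, and
     // notify CTXT about escaping function_decls.
     reachable_regs.mark_escaped_clusters (ctxt);

   Here "model", "store", "arg_sval", "arg_type" and "ctxt" are
   placeholders for the caller's region_model, its store, the svalue and
   declared type of each argument, and the region_model_context.  */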

/* Callback called for each cluster when initializing this object.  */

void
reachable_regions::init_cluster_cb (const region *base_reg,
				    reachable_regions *this_ptr)
{
  this_ptr->init_cluster (base_reg);
}

/* Called for each cluster when initializing this object.  */

void
reachable_regions::init_cluster (const region *base_reg)
{
  /* Mark any globals as mutable (and traverse what they point to).  */
  const region *parent = base_reg->get_parent_region ();
  gcc_assert (parent);
  if (parent->get_kind () == RK_GLOBALS)
    add (base_reg, true);

  /* Mark any clusters that already escaped in previous unknown calls
     as mutable (and traverse what they currently point to).  */
  if (m_store->escaped_p (base_reg))
    add (base_reg, true);

  if (const symbolic_region *sym_reg = base_reg->dyn_cast_symbolic_region ())
    {
      const svalue *ptr = sym_reg->get_pointer ();
      if (ptr->implicitly_live_p (NULL, m_model))
	add (base_reg, true);
      switch (ptr->get_kind ())
	{
	default:
	  break;
	case SK_INITIAL:
	  {
	    /* If BASE_REG is *INIT_VAL(REG) for some other REG, see if REG is
	       unbound and untouched.  If so, then add BASE_REG as a root.  */
	    const initial_svalue *init_sval
	      = as_a <const initial_svalue *> (ptr);
	    const region *init_sval_reg = init_sval->get_region ();
	    const region *other_base_reg = init_sval_reg->get_base_region ();
	    const binding_cluster *other_cluster
	      = m_store->get_cluster (other_base_reg);
	    if (other_cluster == NULL
		|| !other_cluster->touched_p ())
	      add (base_reg, true);
	  }
	  break;

	case SK_UNKNOWN:
	case SK_CONJURED:
	  {
	    /* If this cluster is due to dereferencing an unknown/conjured
	       pointer, any values written through the pointer could still
	       be live.  */
	    add (base_reg, true);
	  }
	  break;
	}
    }
}

/* Lazily mark the cluster containing REG as being reachable, recursively
   adding clusters reachable from REG's cluster.  */

void
reachable_regions::add (const region *reg, bool is_mutable)
{
  gcc_assert (reg);

  const region *base_reg = reg->get_base_region ();
  gcc_assert (base_reg);

  /* Bail out if this cluster is already in the sets at the IS_MUTABLE
     level of mutability.  */
  if (!is_mutable && m_reachable_base_regs.contains (base_reg))
    return;
  m_reachable_base_regs.add (base_reg);

  if (is_mutable)
    {
      if (m_mutable_base_regs.contains (base_reg))
	return;
      else
	m_mutable_base_regs.add (base_reg);
    }

  /* Add values within the cluster.  If any are pointers, add the pointee.  */
  if (binding_cluster *bind_cluster = m_store->get_cluster (base_reg))
    bind_cluster->for_each_value (handle_sval_cb, this);
  else
    handle_sval (m_model->get_store_value (reg, NULL));
}

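/* Callback called for each svalue within a cluster.  */
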
void
reachable_regions::handle_sval_cb (const svalue *sval,
				   reachable_regions *this_ptr)
{
  this_ptr->handle_sval (sval);
}

/* Add SVAL.  If it is a pointer, add the pointed-to region.  */

void
reachable_regions::handle_sval (const svalue *sval)
{
  m_reachable_svals.add (sval);
  m_mutable_svals.add (sval);
  if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
    {
      const region *pointee = ptr->get_pointee ();
      /* Use const-ness of pointer type to affect mutability.  */
      bool ptr_is_mutable = true;
      if (ptr->get_type ()
	  && TREE_CODE (ptr->get_type ()) == POINTER_TYPE
	  && TYPE_READONLY (TREE_TYPE (ptr->get_type ())))
	{
	  ptr_is_mutable = false;
	}
      else
	{
	  m_mutable_svals.add (sval);
	}
      add (pointee, ptr_is_mutable);
    }
  /* Treat all svalues within a compound_svalue as reachable.  */
  if (const compound_svalue *compound_sval
      = sval->dyn_cast_compound_svalue ())
    {
      for (compound_svalue::iterator_t iter = compound_sval->begin ();
	   iter != compound_sval->end (); ++iter)
	{
	  const svalue *iter_sval = (*iter).second;
	  handle_sval (iter_sval);
	}
    }
  if (const svalue *cast = sval->maybe_undo_cast ())
    handle_sval (cast);

  /* If SVAL is the result of a reversible operation, then the operands
     are reachable.  */
  switch (sval->get_kind ())
    {
    default:
      break;
    case SK_UNARYOP:
      {
	const unaryop_svalue *unaryop_sval = (const unaryop_svalue *)sval;
	switch (unaryop_sval->get_op ())
	  {
	  default:
	    break;
	  case NEGATE_EXPR:
	    handle_sval (unaryop_sval->get_arg ());
	    break;
	  }
      }
      break;
    case SK_BINOP:
      {
	const binop_svalue *binop_sval = (const binop_svalue *)sval;
	switch (binop_sval->get_op ())
	  {
	  default:
	    break;
	  case POINTER_PLUS_EXPR:
	    handle_sval (binop_sval->get_arg0 ());
	    handle_sval (binop_sval->get_arg1 ());
	    break;
	  }
      }
    }
}

/* Add SVAL.  If it is a pointer, add the pointed-to region.
   Use PARAM_TYPE for determining mutability.  */
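/* For instance (an illustrative sketch of the code below): a parameter of
   type "const char *" leads to the pointed-to cluster being added as
   reachable but not mutable, whereas a plain "char *" parameter also
   marks that cluster as mutable.  */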

void
reachable_regions::handle_parm (const svalue *sval, tree param_type)
{
  bool is_mutable = true;
  if (param_type
      && TREE_CODE (param_type) == POINTER_TYPE
      && TYPE_READONLY (TREE_TYPE (param_type)))
    is_mutable = false;
  if (is_mutable)
    m_mutable_svals.add (sval);
  else
    m_reachable_svals.add (sval);
  if (const region *base_reg = sval->maybe_get_deref_base_region ())
    add (base_reg, is_mutable);
  /* Treat all svalues within a compound_svalue as reachable.  */
  if (const compound_svalue *compound_sval
      = sval->dyn_cast_compound_svalue ())
    {
      for (compound_svalue::iterator_t iter = compound_sval->begin ();
	   iter != compound_sval->end (); ++iter)
	{
	  const svalue *iter_sval = (*iter).second;
	  handle_sval (iter_sval);
	}
    }
  if (const svalue *cast = sval->maybe_undo_cast ())
    handle_sval (cast);
}

/* Update the store to mark the clusters that were found to be mutable
   as having escaped.
   Notify CTXT about escaping function_decls.  */

void
reachable_regions::mark_escaped_clusters (region_model_context *ctxt)
{
  auto_vec<const function_region *> escaped_fn_regs
    (m_mutable_base_regs.elements ());
  for (hash_set<const region *>::iterator iter = m_mutable_base_regs.begin ();
       iter != m_mutable_base_regs.end (); ++iter)
    {
      const region *base_reg = *iter;
      m_store->mark_as_escaped (base_reg);

      /* If we have a function that's escaped, potentially add
	 it to the worklist.  */
      if (const function_region *fn_reg = base_reg->dyn_cast_function_region ())
	escaped_fn_regs.quick_push (fn_reg);
    }
  if (ctxt)
    {
      /* Sort to ensure deterministic results.  */
      escaped_fn_regs.qsort (region::cmp_ptr_ptr);
      unsigned i;
      const function_region *fn_reg;
      FOR_EACH_VEC_ELT (escaped_fn_regs, i, fn_reg)
	ctxt->on_escaped_function (fn_reg->get_fndecl ());
    }
}

/* Dump SET to PP, sorting it to avoid churn when comparing dumps.  */

template <typename T>
static void
dump_set (const hash_set<const T *> &set, pretty_printer *pp)
{
  auto_vec<const T *> elements (set.elements ());
  for (typename hash_set<const T *>::iterator iter = set.begin ();
       iter != set.end (); ++iter)
    elements.quick_push (*iter);

  elements.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *element;
  FOR_EACH_VEC_ELT (elements, i, element)
    {
      pp_string (pp, "  ");
      element->dump_to_pp (pp, true);
      pp_newline (pp);
    }
}

/* Dump a multiline representation of this object to PP.  */

void
reachable_regions::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "reachable clusters: ");
  pp_newline (pp);
  dump_set (m_reachable_base_regs, pp);

  pp_string (pp, "mutable clusters: ");
  pp_newline (pp);
  dump_set (m_mutable_base_regs, pp);

  pp_string (pp, "reachable svals: ");
  pp_newline (pp);
  dump_set (m_reachable_svals, pp);

  pp_string (pp, "mutable svals: ");
  pp_newline (pp);
  dump_set (m_mutable_svals, pp);
}

/* Dump a multiline representation of this object to stderr.  */

DEBUG_FUNCTION void
reachable_regions::dump () const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp);
  pp_flush (&pp);
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */