1/* Callgraph handling code.
2   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3   Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2002110-1301, USA.  */
21
22/*  This file contains basic routines manipulating call graph and variable pool
23
24The callgraph:
25
26    The call-graph is data structure designed for intra-procedural optimization
27    but it is also used in non-unit-at-a-time compilation to allow easier code
28    sharing.
29
    The call-graph consists of nodes and edges represented via linked lists.
31    Each function (external or not) corresponds to the unique node (in
32    contrast to tree DECL nodes where we can have multiple nodes for each
33    function).
34
    The mapping from declarations to call-graph nodes is done using a hash
    table keyed on the DECL_UID of the declaration, so each declaration
    maps to exactly one node for the lifetime of the compilation.
38    The call-graph nodes are created lazily using cgraph_node function when
39    called for unknown declaration.
40
41    When built, there is one edge for each direct call.  It is possible that
42    the reference will be later optimized out.  The call-graph is built
43    conservatively in order to make conservative data flow analysis possible.
44
45    The callgraph at the moment does not represent indirect calls or calls
46    from other compilation unit.  Flag NEEDED is set for each node that may
47    be accessed in such an invisible way and it shall be considered an
48    entry point to the callgraph.
49
50    Intraprocedural information:
51
52      Callgraph is place to store data needed for intraprocedural optimization.
53      All data structures are divided into three components: local_info that
54      is produced while analyzing the function, global_info that is result
55      of global walking of the callgraph on the end of compilation and
56      rtl_info used by RTL backend to propagate data from already compiled
57      functions to their callers.
58
59    Inlining plans:
60
61      The function inlining information is decided in advance and maintained
62      in the callgraph as so called inline plan.
63      For each inlined call, the callee's node is cloned to represent the
64      new function copy produced by inliner.
65      Each inlined call gets a unique corresponding clone node of the callee
66      and the data structure is updated while inlining is performed, so
67      the clones are eliminated and their callee edges redirected to the
68      caller.
69
70      Each edge has "inline_failed" field.  When the field is set to NULL,
71      the call will be inlined.  When it is non-NULL it contains a reason
72      why inlining wasn't performed.
73
74
75The varpool data structure:
76
77    Varpool is used to maintain variables in similar manner as call-graph
78    is used for functions.  Most of the API is symmetric replacing cgraph
79    function prefix by cgraph_varpool  */
80
81
82#include "config.h"
83#include "system.h"
84#include "coretypes.h"
85#include "tm.h"
86#include "tree.h"
87#include "tree-inline.h"
88#include "langhooks.h"
89#include "hashtab.h"
90#include "toplev.h"
91#include "flags.h"
92#include "ggc.h"
93#include "debug.h"
94#include "target.h"
95#include "basic-block.h"
96#include "cgraph.h"
97#include "varray.h"
98#include "output.h"
99#include "intl.h"
100#include "tree-gimple.h"
101#include "tree-dump.h"
102
/* Forward declarations of edge-removal helpers defined later in this file.  */
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);

/* Hash table used to convert function declarations into cgraph nodes.
   Keyed on DECL_UID; see hash_node/eq_node below.  */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;

/* The linked list of cgraph nodes.  */
struct cgraph_node *cgraph_nodes;

/* Queue of cgraph nodes scheduled to be lowered.  */
struct cgraph_node *cgraph_nodes_queue;

/* Number of nodes in existence.  */
int cgraph_n_nodes;

/* Maximal uid used in cgraph nodes.  */
int cgraph_max_uid;

/* Set when whole unit has been analyzed so we can access global info.  */
bool cgraph_global_info_ready = false;

/* Set when the cgraph is fully built and the basic flags are computed.  */
bool cgraph_function_flags_ready = false;

/* Hash table used to convert variable declarations into varpool nodes.
   Keyed on DECL_UID; see hash_varpool_node/eq_varpool_node below.  */
static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;

/* Queue of varpool nodes scheduled to be lowered and output.  */
struct cgraph_varpool_node *cgraph_varpool_nodes_queue, *cgraph_varpool_first_unanalyzed_node;


/* The linked list of cgraph varpool nodes.  */
struct cgraph_varpool_node *cgraph_varpool_nodes;

/* End of the varpool queue.  Needs to be GTYed to work with PCH.  */
static GTY(()) struct cgraph_varpool_node *cgraph_varpool_last_needed_node;

static hashval_t hash_node (const void *);
static int eq_node (const void *, const void *);
143
144/* Returns a hash code for P.  */
145
146static hashval_t
147hash_node (const void *p)
148{
149  const struct cgraph_node *n = p;
150  return (hashval_t) DECL_UID (n->decl);
151}
152
153/* Returns nonzero if P1 and P2 are equal.  */
154
155static int
156eq_node (const void *p1, const void *p2)
157{
158  const struct cgraph_node *n1 = p1, *n2 = p2;
159  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
160}
161
162/* Allocate new callgraph node and insert it into basic data structures.  */
163static struct cgraph_node *
164cgraph_create_node (void)
165{
166  struct cgraph_node *node;
167
168  node = ggc_alloc_cleared (sizeof (*node));
169  node->next = cgraph_nodes;
170  node->uid = cgraph_max_uid++;
171  if (cgraph_nodes)
172    cgraph_nodes->previous = node;
173  node->previous = NULL;
174  node->global.estimated_growth = INT_MIN;
175  cgraph_nodes = node;
176  cgraph_n_nodes++;
177  return node;
178}
179
180/* Return cgraph node assigned to DECL.  Create new one when needed.  */
181struct cgraph_node *
182cgraph_node (tree decl)
183{
184  struct cgraph_node key, *node, **slot;
185
186  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
187
188  if (!cgraph_hash)
189    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
190
191  key.decl = decl;
192
193  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
194
195  if (*slot)
196    {
197      node = *slot;
198      if (!node->master_clone)
199	node->master_clone = node;
200      return node;
201    }
202
203  node = cgraph_create_node ();
204  node->decl = decl;
205  *slot = node;
206  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
207    {
208      node->origin = cgraph_node (DECL_CONTEXT (decl));
209      node->next_nested = node->origin->nested;
210      node->origin->nested = node;
211      node->master_clone = node;
212    }
213  return node;
214}
215
216/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL.  */
217
218static bool
219decl_assembler_name_equal (tree decl, tree asmname)
220{
221  tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
222
223  if (decl_asmname == asmname)
224    return true;
225
226  /* If the target assembler name was set by the user, things are trickier.
227     We have a leading '*' to begin with.  After that, it's arguable what
228     is the correct thing to do with -fleading-underscore.  Arguably, we've
229     historically been doing the wrong thing in assemble_alias by always
230     printing the leading underscore.  Since we're not changing that, make
231     sure user_label_prefix follows the '*' before matching.  */
232  if (IDENTIFIER_POINTER (decl_asmname)[0] == '*')
233    {
234      const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1;
235      size_t ulp_len = strlen (user_label_prefix);
236
237      if (ulp_len == 0)
238	;
239      else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
240	decl_str += ulp_len;
241      else
242	return false;
243
244      return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0;
245    }
246
247  return false;
248}
249
250
251/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
252   Return NULL if there's no such node.  */
253
254struct cgraph_node *
255cgraph_node_for_asm (tree asmname)
256{
257  struct cgraph_node *node;
258
259  for (node = cgraph_nodes; node ; node = node->next)
260    if (decl_assembler_name_equal (node->decl, asmname))
261      return node;
262
263  return NULL;
264}
265
266/* Return callgraph edge representing CALL_EXPR statement.  */
267struct cgraph_edge *
268cgraph_edge (struct cgraph_node *node, tree call_stmt)
269{
270  struct cgraph_edge *e;
271
272  /* This loop may turn out to be performance problem.  In such case adding
273     hashtables into call nodes with very many edges is probably best
274     solution.  It is not good idea to add pointer into CALL_EXPR itself
275     because we want to make possible having multiple cgraph nodes representing
276     different clones of the same body before the body is actually cloned.  */
277  for (e = node->callees; e; e= e->next_callee)
278    if (e->call_stmt == call_stmt)
279      break;
280  return e;
281}
282
/* Create edge from CALLER to CALLEE in the cgraph representing call
   statement CALL_STMT, with profile count COUNT and loop nesting depth
   NEST.  The new edge is linked at the head of both the caller's callee
   list and the callee's caller list.  Returns the new edge.  */

struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
		    tree call_stmt, gcov_type count, int nest)
{
  struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
#ifdef ENABLE_CHECKING
  struct cgraph_edge *e;

  /* With checking enabled, verify no edge for CALL_STMT exists yet.  */
  for (e = caller->callees; e; e = e->next_callee)
    gcc_assert (e->call_stmt != call_stmt);
#endif

  gcc_assert (get_call_expr_in (call_stmt));

  /* Initialize inline_failed with the most conservative applicable
     reason; a NULL value would mean the call is to be inlined.  */
  if (!DECL_SAVED_TREE (callee->decl))
    edge->inline_failed = N_("function body not available");
  else if (callee->local.redefined_extern_inline)
    edge->inline_failed = N_("redefined extern inline functions are not "
			     "considered for inlining");
  else if (callee->local.inlinable)
    edge->inline_failed = N_("function not considered for inlining");
  else
    edge->inline_failed = N_("function not inlinable");

  edge->aux = NULL;

  edge->caller = caller;
  edge->callee = callee;
  edge->call_stmt = call_stmt;
  /* Splice into the callee's doubly-linked caller list...  */
  edge->prev_caller = NULL;
  edge->next_caller = callee->callers;
  if (callee->callers)
    callee->callers->prev_caller = edge;
  /* ...and into the caller's doubly-linked callee list.  */
  edge->prev_callee = NULL;
  edge->next_callee = caller->callees;
  if (caller->callees)
    caller->callees->prev_callee = edge;
  caller->callees = edge;
  callee->callers = edge;
  edge->count = count;
  edge->loop_nest = nest;
  return edge;
}
328
329/* Remove the edge E from the list of the callers of the callee.  */
330
331static inline void
332cgraph_edge_remove_callee (struct cgraph_edge *e)
333{
334  if (e->prev_caller)
335    e->prev_caller->next_caller = e->next_caller;
336  if (e->next_caller)
337    e->next_caller->prev_caller = e->prev_caller;
338  if (!e->prev_caller)
339    e->callee->callers = e->next_caller;
340}
341
342/* Remove the edge E from the list of the callees of the caller.  */
343
344static inline void
345cgraph_edge_remove_caller (struct cgraph_edge *e)
346{
347  if (e->prev_callee)
348    e->prev_callee->next_callee = e->next_callee;
349  if (e->next_callee)
350    e->next_callee->prev_callee = e->prev_callee;
351  if (!e->prev_callee)
352    e->caller->callees = e->next_callee;
353}
354
/* Remove edge E from the cgraph entirely, unlinking it from both the
   callee's caller list and the caller's callee list.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  cgraph_edge_remove_callee (e);
  cgraph_edge_remove_caller (e);
}
366
367/* Redirect callee of E to N.  The function does not update underlying
368   call expression.  */
369
370void
371cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
372{
373  /* Remove from callers list of the current callee.  */
374  cgraph_edge_remove_callee (e);
375
376  /* Insert to callers list of the new callee.  */
377  e->prev_caller = NULL;
378  if (n->callers)
379    n->callers->prev_caller = e;
380  e->next_caller = n->callers;
381  n->callers = e;
382  e->callee = n;
383}
384
385/* Remove all callees from the node.  */
386
387void
388cgraph_node_remove_callees (struct cgraph_node *node)
389{
390  struct cgraph_edge *e;
391
392  /* It is sufficient to remove the edges from the lists of callers of
393     the callees.  The callee list of the node can be zapped with one
394     assignment.  */
395  for (e = node->callees; e; e = e->next_callee)
396    cgraph_edge_remove_callee (e);
397  node->callees = NULL;
398}
399
400/* Remove all callers from the node.  */
401
402static void
403cgraph_node_remove_callers (struct cgraph_node *node)
404{
405  struct cgraph_edge *e;
406
407  /* It is sufficient to remove the edges from the lists of callees of
408     the callers.  The caller list of the node can be zapped with one
409     assignment.  */
410  for (e = node->callers; e; e = e->next_caller)
411    cgraph_edge_remove_caller (e);
412  node->callers = NULL;
413}
414
/* Remove NODE from the cgraph: detach all its edges, recursively remove
   its nested functions, unlink it from its origin's nest list and from
   the global node chain, and update the decl hash table's clone chain.
   May also proactively free the function body when it is known to be no
   longer needed.  The node structure itself is NOT freed, so walks over
   the node chain may continue past it.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;

  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  /* Nested functions cannot outlive their containing function.  */
  while (node->nested)
    cgraph_remove_node (node->nested);
  if (node->origin)
    {
      /* Unlink NODE from its origin's singly-linked nest list.  */
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
	node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  /* Unlink from the doubly-linked chain of all cgraph nodes.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      /* NODE is the master clone recorded in the hash table.  */
      if (node->next_clone)
      {
	struct cgraph_node *new_node = node->next_clone;
	struct cgraph_node *n;

	/* Make the next clone be the master clone */
	for (n = new_node; n; n = n->next_clone)
	  n->master_clone = new_node;

	*slot = new_node;
	node->next_clone->prev_clone = NULL;
      }
      else
	{
	  /* Last clone of this decl: drop the hash entry and allow the
	     body to be reclaimed below.  */
          htab_clear_slot (cgraph_hash, slot);
	  kill_body = true;
	}
    }
  else
    {
      /* NODE is a non-master clone: unlink it from the clone chain.  */
      node->prev_clone->next_clone = node->next_clone;
      if (node->next_clone)
        node->next_clone->prev_clone = node->prev_clone;
    }

  /* While all the clones are removed after being proceeded, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the case.
     */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = *slot;
      if (!n->next_clone && !n->global.inlined_to
	  && (cgraph_global_info_ready
	      && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
	kill_body = true;
    }

  /* Keep the body when tree dumping is enabled or in non-unit-at-a-time
     mode, where it may still be inspected or needed later.  */
  if (kill_body && !dump_enabled_p (TDI_tree_all) && flag_unit_at_a_time)
    {
      DECL_SAVED_TREE (node->decl) = NULL;
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
      DECL_INITIAL (node->decl) = error_mark_node;
    }
  cgraph_n_nodes--;
  /* Do not free the structure itself so the walk over chain can continue.  */
}
491
492/* Notify finalize_compilation_unit that given node is reachable.  */
493
494void
495cgraph_mark_reachable_node (struct cgraph_node *node)
496{
497  if (!node->reachable && node->local.finalized)
498    {
499      notice_global_symbol (node->decl);
500      node->reachable = 1;
501      gcc_assert (!cgraph_global_info_ready);
502
503      node->next_needed = cgraph_nodes_queue;
504      cgraph_nodes_queue = node;
505    }
506}
507
508/* Likewise indicate that a node is needed, i.e. reachable via some
509   external means.  */
510
511void
512cgraph_mark_needed_node (struct cgraph_node *node)
513{
514  node->needed = 1;
515  cgraph_mark_reachable_node (node);
516}
517
518/* Return local info for the compiled function.  */
519
520struct cgraph_local_info *
521cgraph_local_info (tree decl)
522{
523  struct cgraph_node *node;
524
525  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
526  node = cgraph_node (decl);
527  return &node->local;
528}
529
530/* Return local info for the compiled function.  */
531
532struct cgraph_global_info *
533cgraph_global_info (tree decl)
534{
535  struct cgraph_node *node;
536
537  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
538  node = cgraph_node (decl);
539  return &node->global;
540}
541
542/* Return local info for the compiled function.  */
543
544struct cgraph_rtl_info *
545cgraph_rtl_info (tree decl)
546{
547  struct cgraph_node *node;
548
549  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
550  node = cgraph_node (decl);
551  if (decl != current_function_decl
552      && !TREE_ASM_WRITTEN (node->decl))
553    return NULL;
554  return &node->rtl;
555}
556
557/* Return name of the node used in debug output.  */
558const char *
559cgraph_node_name (struct cgraph_node *node)
560{
561  return lang_hooks.decl_printable_name (node->decl, 2);
562}
563
564/* Return name of the node used in debug output.  */
565static const char *
566cgraph_varpool_node_name (struct cgraph_varpool_node *node)
567{
568  return lang_hooks.decl_printable_name (node->decl, 2);
569}
570
/* Names used to print out the availability enum.  NOTE(review): the
   order must match enum availability (declared elsewhere, presumably
   cgraph.h) — verify when that enum changes.  */
static const char * const availability_names[] =
  {"unset", "not_available", "overwrittable", "available", "local"};
574
/* Dump the given cgraph NODE to file F: its identity, availability,
   profile count, size estimates, per-node flags, and its caller and
   callee edge lists.  */
void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *edge;
  fprintf (f, "%s/%i:", cgraph_node_name (node), node->uid);
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
	     cgraph_node_name (node->global.inlined_to),
	     node->global.inlined_to->uid);
  /* Availability is only meaningful once the flags were computed.  */
  if (cgraph_function_flags_ready)
    fprintf (f, " availability:%s",
	     availability_names [cgraph_function_body_availability (node)]);
  /* For non-master clones, show the uid of the master clone.  */
  if (node->master_clone && node->master_clone->uid != node->uid)
    fprintf (f, "(%i)", node->master_clone->uid);
  if (node->count)
    fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
	     (HOST_WIDEST_INT)node->count);
  if (node->local.self_insns)
    fprintf (f, " %i insns", node->local.self_insns);
  if (node->global.insns && node->global.insns != node->local.self_insns)
    fprintf (f, " (%i after inlining)", node->global.insns);
  if (node->origin)
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  if (node->needed)
    fprintf (f, " needed");
  else if (node->reachable)
    fprintf (f, " reachable");
  if (DECL_SAVED_TREE (node->decl))
    fprintf (f, " tree");
  if (node->output)
    fprintf (f, " output");
  if (node->local.local)
    fprintf (f, " local");
  if (node->local.externally_visible)
    fprintf (f, " externally_visible");
  if (node->local.finalized)
    fprintf (f, " finalized");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  if (node->local.redefined_extern_inline)
    fprintf (f, " redefined_extern_inline");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");

  /* Incoming edges.  */
  fprintf (f, "\n  called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
	       edge->caller->uid);
      if (edge->count)
	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
		 (HOST_WIDEST_INT)edge->count);
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
    }

  /* Outgoing edges.  */
  fprintf (f, "\n  calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
	       edge->callee->uid);
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
      if (edge->count)
	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
		 (HOST_WIDEST_INT)edge->count);
      if (edge->loop_nest)
	fprintf (f, "(nested in %i loops) ", edge->loop_nest);
    }
  fprintf (f, "\n");
}
649
650/* Dump the callgraph.  */
651
652void
653dump_cgraph (FILE *f)
654{
655  struct cgraph_node *node;
656
657  fprintf (f, "callgraph:\n\n");
658  for (node = cgraph_nodes; node; node = node->next)
659    dump_cgraph_node (f, node);
660}
661
/* Dump the given varpool NODE to file F: its name, availability and
   per-node flags.  */
void
dump_cgraph_varpool_node (FILE *f, struct cgraph_varpool_node *node)
{
  fprintf (f, "%s:", cgraph_varpool_node_name (node));
  fprintf (f, " availability:%s", availability_names [cgraph_variable_initializer_availability (node)]);
  if (DECL_INITIAL (node->decl))
    fprintf (f, " initialized");
  if (node->needed)
    fprintf (f, " needed");
  if (node->analyzed)
    fprintf (f, " analyzed");
  if (node->finalized)
    fprintf (f, " finalized");
  if (node->output)
    fprintf (f, " output");
  if (node->externally_visible)
    fprintf (f, " externally_visible");
  fprintf (f, "\n");
}
682
683/* Dump the callgraph.  */
684
685void
686dump_varpool (FILE *f)
687{
688  struct cgraph_varpool_node *node;
689
690  fprintf (f, "variable pool:\n\n");
691  for (node = cgraph_varpool_nodes; node; node = node->next_needed)
692    dump_cgraph_varpool_node (f, node);
693}
694
695/* Returns a hash code for P.  */
696
697static hashval_t
698hash_varpool_node (const void *p)
699{
700  const struct cgraph_varpool_node *n = p;
701  return (hashval_t) DECL_UID (n->decl);
702}
703
704/* Returns nonzero if P1 and P2 are equal.  */
705
706static int
707eq_varpool_node (const void *p1, const void *p2)
708{
709  const struct cgraph_varpool_node *n1 = p1, *n2 = p2;
710  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
711}
712
713/* Return cgraph_varpool node assigned to DECL.  Create new one when needed.  */
714struct cgraph_varpool_node *
715cgraph_varpool_node (tree decl)
716{
717  struct cgraph_varpool_node key, *node, **slot;
718
719  gcc_assert (DECL_P (decl) && TREE_CODE (decl) != FUNCTION_DECL);
720
721  if (!cgraph_varpool_hash)
722    cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
723				           eq_varpool_node, NULL);
724  key.decl = decl;
725  slot = (struct cgraph_varpool_node **)
726    htab_find_slot (cgraph_varpool_hash, &key, INSERT);
727  if (*slot)
728    return *slot;
729  node = ggc_alloc_cleared (sizeof (*node));
730  node->decl = decl;
731  node->next = cgraph_varpool_nodes;
732  cgraph_varpool_nodes = node;
733  *slot = node;
734  return node;
735}
736
737struct cgraph_varpool_node *
738cgraph_varpool_node_for_asm (tree asmname)
739{
740  struct cgraph_varpool_node *node;
741
742  for (node = cgraph_varpool_nodes; node ; node = node->next)
743    if (decl_assembler_name_equal (node->decl, asmname))
744      return node;
745
746  return NULL;
747}
748
749/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */
750void
751change_decl_assembler_name (tree decl, tree name)
752{
753  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
754    {
755      SET_DECL_ASSEMBLER_NAME (decl, name);
756      return;
757    }
758  if (name == DECL_ASSEMBLER_NAME (decl))
759    return;
760
761  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
762      && DECL_RTL_SET_P (decl))
763    warning (0, "%D renamed after being referenced in assembly", decl);
764
765  SET_DECL_ASSEMBLER_NAME (decl, name);
766}
767
/* Helper function for finalization code - add NODE at the tail of the
   needed-variables queue so it will be analyzed and compiled.  Also
   maintains the first-unanalyzed pointer and notices the symbol for
   section/visibility bookkeeping.  */
void
cgraph_varpool_enqueue_needed_node (struct cgraph_varpool_node *node)
{
  /* Append behind the current tail, then NODE becomes the new tail.  */
  if (cgraph_varpool_last_needed_node)
    cgraph_varpool_last_needed_node->next_needed = node;
  cgraph_varpool_last_needed_node = node;
  node->next_needed = NULL;
  /* First enqueued node also becomes the queue head...  */
  if (!cgraph_varpool_nodes_queue)
    cgraph_varpool_nodes_queue = node;
  /* ...and the first node still awaiting analysis.  */
  if (!cgraph_varpool_first_unanalyzed_node)
    cgraph_varpool_first_unanalyzed_node = node;
  notice_global_symbol (node->decl);
}
783
784/* Reset the queue of needed nodes.  */
785void
786cgraph_varpool_reset_queue (void)
787{
788  cgraph_varpool_last_needed_node = NULL;
789  cgraph_varpool_nodes_queue = NULL;
790  cgraph_varpool_first_unanalyzed_node = NULL;
791}
792
793/* Notify finalize_compilation_unit that given node is reachable
794   or needed.  */
795void
796cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
797{
798  if (!node->needed && node->finalized)
799    cgraph_varpool_enqueue_needed_node (node);
800  node->needed = 1;
801}
802
/* Determine if variable DECL (whose varpool node is NODE) is needed.
   That is, visible to something either outside this translation unit,
   something magic in the system configury, or (if not doing
   unit-at-a-time) to something we haven't seen yet.  The order of the
   tests below matters: each early return short-circuits the rest.  */

bool
decide_is_variable_needed (struct cgraph_varpool_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->externally_visible)
    return true;
  if (!flag_unit_at_a_time
      && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the definition available, then it's still needed.  */
  if (node->needed)
    return true;

  /* Externally visible variables must be output.  The exception is
     COMDAT variables that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !flag_whole_program && !DECL_COMDAT (decl)
      && !DECL_EXTERNAL (decl))
    return true;

  /* In unit-at-a-time mode nothing else forces output.  */
  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if its marked for inlining.  Otherwise we want to emit it now.  */

  /* We want to emit COMDAT variables only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  return true;
}
847
/* Mark the variable DECL as finalized (its initializer is fully known)
   and, when it is decided to be needed, enqueue it for output.  May
   trigger assembly of pending declarations when global info is ready or
   when not compiling unit-at-a-time.  */
void
cgraph_varpool_finalize_decl (tree decl)
{
  struct cgraph_varpool_node *node = cgraph_varpool_node (decl);

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */
  if (node->finalized)
    {
      if (cgraph_global_info_ready || !flag_unit_at_a_time)
	cgraph_varpool_assemble_pending_decls ();
      return;
    }
  /* A node already marked needed before its definition was available is
     enqueued now that the definition arrived.  */
  if (node->needed)
    cgraph_varpool_enqueue_needed_node (node);
  node->finalized = true;

  if (decide_is_variable_needed (node, decl))
    cgraph_varpool_mark_needed_node (node);
  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    cgraph_varpool_mark_needed_node (node);
  if (cgraph_global_info_ready || !flag_unit_at_a_time)
    cgraph_varpool_assemble_pending_decls ();
}
877
878/* Return true when the DECL can possibly be inlined.  */
879bool
880cgraph_function_possibly_inlined_p (tree decl)
881{
882  if (!cgraph_global_info_ready)
883    return (DECL_INLINE (decl) && !flag_really_no_inline);
884  return DECL_POSSIBLY_INLINED (decl);
885}
886
887/* Create clone of E in the node N represented by CALL_EXPR the callgraph.  */
888struct cgraph_edge *
889cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
890		   tree call_stmt, gcov_type count_scale, int loop_nest,
891		   bool update_original)
892{
893  struct cgraph_edge *new;
894
895  new = cgraph_create_edge (n, e->callee, call_stmt,
896                            e->count * count_scale / REG_BR_PROB_BASE,
897		            e->loop_nest + loop_nest);
898
899  new->inline_failed = e->inline_failed;
900  if (update_original)
901    {
902      e->count -= new->count;
903      if (e->count < 0)
904	e->count = 0;
905    }
906  return new;
907}
908
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by node.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest,
		   bool update_original)
{
  struct cgraph_node *new = cgraph_create_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;

  /* The clone shares N's decl and copies its analysis results.  */
  new->decl = n->decl;
  new->origin = n->origin;
  if (new->origin)
    {
      /* Nested clone: hook into the origin's nest list as well.  */
      new->next_nested = new->origin->nested;
      new->origin->nested = new;
    }
  new->analyzed = n->analyzed;
  new->local = n->local;
  new->global = n->global;
  new->rtl = n->rtl;
  new->master_clone = n->master_clone;
  new->count = count;
  /* Scale factor (in REG_BR_PROB_BASE units) used to apportion the
     callee edge counts; zero when N has no profile.  */
  if (n->count)
    count_scale = new->count * REG_BR_PROB_BASE / n->count;
  else
    count_scale = 0;
  if (update_original)
    {
      /* Move the cloned share of the profile out of N, clamping at 0.  */
      n->count -= count;
      if (n->count < 0)
	n->count = 0;
    }

  /* Clone all outgoing edges of N onto the new node.  */
  for (e = n->callees;e; e=e->next_callee)
    cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest,
		       update_original);

  /* Splice the clone into N's doubly-linked clone chain, right after N.  */
  new->next_clone = n->next_clone;
  new->prev_clone = n;
  n->next_clone = new;
  if (new->next_clone)
    new->next_clone->prev_clone = new;

  return new;
}
959
960/* Return true if N is an master_clone, (see cgraph_master_clone).  */
961
962bool
963cgraph_is_master_clone (struct cgraph_node *n)
964{
965  return (n == cgraph_master_clone (n));
966}
967
968struct cgraph_node *
969cgraph_master_clone (struct cgraph_node *n)
970{
971  enum availability avail = cgraph_function_body_availability (n);
972
973  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
974    return NULL;
975
976  if (!n->master_clone)
977    n->master_clone = cgraph_node (n->decl);
978
979  return n->master_clone;
980}
981
982/* NODE is no longer nested function; update cgraph accordingly.  */
983void
984cgraph_unnest_node (struct cgraph_node *node)
985{
986  struct cgraph_node **node2 = &node->origin->nested;
987  gcc_assert (node->origin);
988
989  while (*node2 != node)
990    node2 = &(*node2)->next_nested;
991  *node2 = node->next_nested;
992  node->origin = NULL;
993}
994
995/* Return function availability.  See cgraph.h for description of individual
996   return values.  */
997enum availability
998cgraph_function_body_availability (struct cgraph_node *node)
999{
1000  enum availability avail;
1001  gcc_assert (cgraph_function_flags_ready);
1002  if (!node->analyzed)
1003    avail = AVAIL_NOT_AVAILABLE;
1004  else if (node->local.local)
1005    avail = AVAIL_LOCAL;
1006  else if (node->local.externally_visible)
1007    avail = AVAIL_AVAILABLE;
1008
1009  /* If the function can be overwritten, return OVERWRITABLE.  Take
1010     care at least of two notable extensions - the COMDAT functions
1011     used to share template instantiations in C++ (this is symmetric
1012     to code cp_cannot_inline_tree_fn and probably shall be shared and
1013     the inlinability hooks completely eliminated).
1014
1015     ??? Does the C++ one definition rule allow us to always return
1016     AVAIL_AVAILABLE here?  That would be good reason to preserve this
1017     hook Similarly deal with extern inline functions - this is again
1018     necessary to get C++ shared functions having keyed templates
1019     right and in the C extension documentation we probably should
1020     document the requirement of both versions of function (extern
1021     inline and offline) having same side effect characteristics as
1022     good optimization is what this optimization is about.  */
1023
1024  else if (!(*targetm.binds_local_p) (node->decl)
1025	   && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
1026    avail = AVAIL_OVERWRITABLE;
1027  else avail = AVAIL_AVAILABLE;
1028
1029  return avail;
1030}
1031
1032/* Return variable availability.  See cgraph.h for description of individual
1033   return values.  */
1034enum availability
1035cgraph_variable_initializer_availability (struct cgraph_varpool_node *node)
1036{
1037  gcc_assert (cgraph_function_flags_ready);
1038  if (!node->finalized)
1039    return AVAIL_NOT_AVAILABLE;
1040  if (!TREE_PUBLIC (node->decl))
1041    return AVAIL_AVAILABLE;
1042  /* If the variable can be overwritten, return OVERWRITABLE.  Takes
1043     care of at least two notable extensions - the COMDAT variables
1044     used to share template instantiations in C++.  */
1045  if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
1046    return AVAIL_OVERWRITABLE;
1047  return AVAIL_AVAILABLE;
1048}
1049
1050#include "gt-cgraph.h"
1051