1/* Write and read the cgraph to the memory mapped representation of a
2   .o file.
3
4   Copyright (C) 2009-2015 Free Software Foundation, Inc.
5   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3.  If not see
21<http://www.gnu.org/licenses/>.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "hash-set.h"
28#include "machmode.h"
29#include "vec.h"
30#include "double-int.h"
31#include "input.h"
32#include "alias.h"
33#include "symtab.h"
34#include "wide-int.h"
35#include "inchash.h"
36#include "tree.h"
37#include "fold-const.h"
38#include "stringpool.h"
39#include "predict.h"
40#include "hard-reg-set.h"
41#include "function.h"
42#include "basic-block.h"
43#include "tree-ssa-alias.h"
44#include "internal-fn.h"
45#include "gimple-expr.h"
46#include "is-a.h"
47#include "gimple.h"
48#include "hashtab.h"
49#include "rtl.h"
50#include "flags.h"
51#include "statistics.h"
52#include "real.h"
53#include "fixed-value.h"
54#include "insn-config.h"
55#include "expmed.h"
56#include "dojump.h"
57#include "explow.h"
58#include "calls.h"
59#include "emit-rtl.h"
60#include "varasm.h"
61#include "stmt.h"
62#include "expr.h"
63#include "params.h"
64#include "langhooks.h"
65#include "bitmap.h"
66#include "diagnostic-core.h"
67#include "except.h"
68#include "timevar.h"
69#include "hash-map.h"
70#include "plugin-api.h"
71#include "ipa-ref.h"
72#include "cgraph.h"
73#include "lto-streamer.h"
74#include "data-streamer.h"
75#include "tree-streamer.h"
76#include "gcov-io.h"
77#include "tree-pass.h"
78#include "profile.h"
79#include "context.h"
80#include "pass_manager.h"
81#include "ipa-utils.h"
82#include "omp-low.h"
83#include "ipa-chkp.h"
84
/* True when asm nodes have been output.  */
86bool asm_nodes_output = false;
87
88static void output_cgraph_opt_summary (void);
89static void input_cgraph_opt_summary (vec<symtab_node *>  nodes);
90
91/* Number of LDPR values known to GCC.  */
92#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
93
/* All node orders are offset by ORDER_BASE.  */
95static int order_base;
96
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Direct cgraph edge (known callee).  */
  LTO_symtab_edge,
  /* Indirect cgraph edge (callee unknown at stream-out time).  */
  LTO_symtab_indirect_edge,
  /* Varpool node record.  */
  LTO_symtab_variable,
  /* Sentinel; must stay last.  Used as the enum range bound when
     streaming tags.  */
  LTO_symtab_last_tag
};
113
/* Create a new symtab encoder.
   If FOR_INPUT, the encoder allocates only the data structures needed
   to read the symtab.  */
117
118lto_symtab_encoder_t
119lto_symtab_encoder_new (bool for_input)
120{
121  lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
122
123  if (!for_input)
124    encoder->map = new hash_map<symtab_node *, size_t>;
125  encoder->nodes.create (0);
126  return encoder;
127}
128
129
130/* Delete ENCODER and its components.  */
131
132void
133lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
134{
135   encoder->nodes.release ();
136   if (encoder->map)
137     delete encoder->map;
138   free (encoder);
139}
140
141
142/* Return the existing reference number of NODE in the symtab encoder in
143   output block OB.  Assign a new reference if this is the first time
144   NODE is encoded.  */
145
146int
147lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
148			   symtab_node *node)
149{
150  int ref;
151
152  if (!encoder->map)
153    {
154      lto_encoder_entry entry = {node, false, false, false};
155
156      ref = encoder->nodes.length ();
157      encoder->nodes.safe_push (entry);
158      return ref;
159    }
160
161  size_t *slot = encoder->map->get (node);
162  if (!slot || !*slot)
163    {
164      lto_encoder_entry entry = {node, false, false, false};
165      ref = encoder->nodes.length ();
166      if (!slot)
167        encoder->map->put (node, ref + 1);
168      encoder->nodes.safe_push (entry);
169    }
170  else
171    ref = *slot - 1;
172
173  return ref;
174}
175
176/* Remove NODE from encoder.  */
177
178bool
179lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
180			        symtab_node *node)
181{
182  int index;
183  lto_encoder_entry last_node;
184
185  size_t *slot = encoder->map->get (node);
186  if (slot == NULL || !*slot)
187    return false;
188
189  index = *slot - 1;
190  gcc_checking_assert (encoder->nodes[index].node == node);
191
192  /* Remove from vector. We do this by swapping node with the last element
193     of the vector.  */
194  last_node = encoder->nodes.pop ();
195  if (last_node.node != node)
196    {
197      gcc_assert (encoder->map->put (last_node.node, index + 1));
198
199      /* Move the last element to the original spot of NODE.  */
200      encoder->nodes[index] = last_node;
201    }
202
203  /* Remove element from hash table.  */
204  encoder->map->remove (node);
205  return true;
206}
207
208
209/* Return TRUE if we should encode the body of NODE (if any).  */
210
211bool
212lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
213				  struct cgraph_node *node)
214{
215  int index = lto_symtab_encoder_lookup (encoder, node);
216  return encoder->nodes[index].body;
217}
218
219/* Specify that we encode the body of NODE in this partition.  */
220
221static void
222lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
223				    struct cgraph_node *node)
224{
225  int index = lto_symtab_encoder_encode (encoder, node);
226  gcc_checking_assert (encoder->nodes[index].node == node);
227  encoder->nodes[index].body = true;
228}
229
230/* Return TRUE if we should encode initializer of NODE (if any).  */
231
232bool
233lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
234					 varpool_node *node)
235{
236  int index = lto_symtab_encoder_lookup (encoder, node);
237  if (index == LCC_NOT_FOUND)
238    return false;
239  return encoder->nodes[index].initializer;
240}
241
242/* Specify that we should encode initializer of NODE (if any).  */
243
244static void
245lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
246					   varpool_node *node)
247{
248  int index = lto_symtab_encoder_lookup (encoder, node);
249  encoder->nodes[index].initializer = true;
250}
251
252/* Return TRUE if NODE is in this partition.  */
253
254bool
255lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
256				   symtab_node *node)
257{
258  int index = lto_symtab_encoder_lookup (encoder, node);
259  if (index == LCC_NOT_FOUND)
260    return false;
261  return encoder->nodes[index].in_partition;
262}
263
264/* Specify that NODE is in this partition.  */
265
266void
267lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
268				     symtab_node *node)
269{
270  int index = lto_symtab_encoder_encode (encoder, node);
271  encoder->nodes[index].in_partition = true;
272}
273
274/* Output the cgraph EDGE to OB using ENCODER.  */
275
static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* The tag lets the reader distinguish direct from indirect edges.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  /* The caller must already be present in the encoder.  */
  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Indirect edges have no known callee to reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_gcov_count_stream (ob->main_stream, edge->count);

  /* Remaining scalar attributes are bit-packed; the reader must unpack
     them in exactly this order.  */
  bp = bitpack_create (ob->main_stream);
  /* If the caller has no gimple body, reuse the uid recorded at stream-in
     time; otherwise take the statement uid biased by 1 so 0 can mean
     "no statement".  */
  uid = (!gimple_has_body_p (edge->caller->decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
  bp_pack_enum (&bp, cgraph_inline_failed_t,
	        CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* ECF flags of the (unknown) callee, one bit each.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
  if (edge->indirect_unknown_callee)
    {
      /* Speculative common target (from profile feedback), if any.  */
      streamer_write_hwi_stream (ob->main_stream,
			         edge->indirect_info->common_target_id);
      if (edge->indirect_info->common_target_id)
	streamer_write_hwi_stream
	   (ob->main_stream, edge->indirect_info->common_target_probability);
    }
}
342
/* Return true if NODE contains references from other partitions.  */
344
345bool
346referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
347{
348  int i;
349  struct ipa_ref *ref = NULL;
350
351  for (i = 0; node->iterate_referring (i, ref); i++)
352    {
353      /* Ignore references from non-offloadable nodes while streaming NODE into
354	 offload LTO section.  */
355      if (!ref->referring->need_lto_streaming)
356	continue;
357
358      if (ref->referring->in_other_partition
359          || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
360	return true;
361    }
362  return false;
363}
364
365/* Return true when node is reachable from other partition.  */
366
367bool
368reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
369{
370  struct cgraph_edge *e;
371  if (!node->definition)
372    return false;
373  if (node->global.inlined_to)
374    return false;
375  for (e = node->callers; e; e = e->next_caller)
376    {
377      /* Ignore references from non-offloadable nodes while streaming NODE into
378	 offload LTO section.  */
379      if (!e->caller->need_lto_streaming)
380	continue;
381
382      if (e->caller->in_other_partition
383	  || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
384	return true;
385    }
386  return false;
387}
388
/* Return true if NODE is referenced from within this partition.  */
390
391bool
392referenced_from_this_partition_p (symtab_node *node,
393				  lto_symtab_encoder_t encoder)
394{
395  int i;
396  struct ipa_ref *ref = NULL;
397
398  for (i = 0; node->iterate_referring (i, ref); i++)
399    if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
400      return true;
401  return false;
402}
403
/* Return true when NODE is called from within this partition.  */
405
406bool
407reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
408{
409  struct cgraph_edge *e;
410  for (e = node->callers; e; e = e->next_caller)
411    if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
412      return true;
413  return false;
414}
415
416/* Output the cgraph NODE to OB.  ENCODER is used to find the
417   reference number of NODE->inlined_to.  SET is the set of nodes we
418   are writing to the current file.  If NODE is not in SET, then NODE
419   is a boundary of a cgraph_node_set and we pretend NODE just has a
420   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
421   that have had their callgraph node written so far.  This is used to
422   determine if NODE is a clone of a previously written node.  */
423
static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  /* A boundary node is referenced from this partition but not part of it.  */
  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Analyzed nodes in the partition (and aliases/thunks even when on the
     boundary) are streamed as analyzed; everything else as unavailable.  */
  if (node->analyzed && (!boundary_p || node->alias || node->thunk.thunk_p))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care about.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes:  These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of a boundary.
         gcc_assert (!node->global.inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Walk up the clone tree to the nearest ancestor present in the encoder;
     on exit with CLONE_OF non-NULL, REF holds that ancestor's index.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     a declaration and we do not need to pass down the clone tree. */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  /* LCC_NOT_FOUND here tells the reader NODE is not (streamed as) a clone.  */
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  streamer_write_gcov_count_stream (ob->main_stream, node->count);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transform passes still to be applied:
     count first, then each pass's static number.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      /* Reference of the function this node is inlined into, or
	 LCC_NOT_FOUND for an offline copy.  */
      if (node->global.inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group name is streamed as a NUL-terminated string; an empty
     string means no group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group && !boundary_p)
	{
	  ref = lto_symtab_encoder_lookup (encoder,
					   node->same_comdat_group);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  /* Pack the boolean node attributes; the reader must unpack them in
     exactly this order.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local.local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->local.versionable, 1);
  bp_pack_value (&bp, node->local.can_change_signature, 1);
  bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* "Used from other partition" bit.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->thunk.thunk_p, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
	        LDPR_NUM_KNOWN, node->resolution);
  bp_pack_value (&bp, node->instrumentation_clone, 1);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  if (node->thunk.thunk_p)
    {
      /* Encode the three thunk flags into one uhwi; bit 0 is always set
	 so the value is never zero.  */
      streamer_write_uhwi_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4
	  + (node->thunk.add_pointer_bounds_args != 0) * 8);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  /* Constructor/destructor priorities are streamed conditionally; the
     reader checks the same DECL flags.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());

  /* For a CHKP instrumentation clone, also stream the original decl.  */
  if (node->instrumentation_clone)
    lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
}
605
606/* Output the varpool NODE to OB.
607   If NODE is not in SET, then NODE is a boundary.  */
608
static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  /* A boundary node is referenced from this partition but not part of it.  */
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  /* Stream the initializer only for defined variables selected for
     initializer encoding.  */
  bool encode_initializer_p
	 = (node->definition
	    && lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  /* Flags are bit-packed; the reader must unpack them in the same order.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* body_removed: also set when the definition exists but its initializer
     is dropped for this partition.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  /* definition: only kept when the initializer (or alias) travels too.  */
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
	  /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->need_bounds_init, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group name as a NUL-terminated string; empty means none.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group && !boundary_p)
	{
	  ref = lto_symtab_encoder_lookup (encoder,
					   node->same_comdat_group);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
697
/* Output the reference REF to OB using ENCODER.  */
700
701static void
702lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
703		lto_symtab_encoder_t encoder)
704{
705  struct bitpack_d bp;
706  int nref;
707  int uid = ref->lto_stmt_uid;
708  struct cgraph_node *node;
709
710  bp = bitpack_create (ob->main_stream);
711  bp_pack_value (&bp, ref->use, 3);
712  bp_pack_value (&bp, ref->speculative, 1);
713  streamer_write_bitpack (&bp);
714  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
715  gcc_assert (nref != LCC_NOT_FOUND);
716  streamer_write_hwi_stream (ob->main_stream, nref);
717
718  node = dyn_cast <cgraph_node *> (ref->referring);
719  if (node)
720    {
721      if (ref->stmt)
722	uid = gimple_uid (ref->stmt) + 1;
723      streamer_write_hwi_stream (ob->main_stream, uid);
724    }
725}
726
727/* Stream out profile_summary to OB.  */
728
static void
output_profile_summary (struct lto_simple_output_block *ob)
{
  unsigned h_ix;
  struct bitpack_d bp;

  if (profile_info)
    {
      /* We do not output num and run_max, they are not used by
         GCC profile feedback and they are difficult to merge from multiple
         units.  */
      gcc_assert (profile_info->runs);
      streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);

      /* sum_all is needed for computing the working set with the
         histogram.  */
      streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);

      /* Create and output a bitpack of non-zero histogram entries indices.  */
      bp = bitpack_create (ob->main_stream);
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
      streamer_write_bitpack (&bp);
      /* Now stream out only those non-zero entries; the reader knows which
	 indices to expect from the bitpack above.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        {
          if (!profile_info->histogram[h_ix].num_counters)
            continue;
          streamer_write_gcov_count_stream (ob->main_stream,
                                      profile_info->histogram[h_ix].num_counters);
          streamer_write_gcov_count_stream (ob->main_stream,
                                      profile_info->histogram[h_ix].min_value);
          streamer_write_gcov_count_stream (ob->main_stream,
                                      profile_info->histogram[h_ix].cum_value);
         }
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
       if (flag_wpa)
         streamer_write_gcov_count_stream (ob->main_stream,
					   get_hot_bb_threshold ());
    }
  else
    /* A run count of zero marks "no profile data" for the reader.  */
    streamer_write_uhwi_stream (ob->main_stream, 0);
}
774
775/* Output all callees or indirect outgoing edges.  EDGE must be the first such
776   edge.  */
777
778static void
779output_outgoing_cgraph_edges (struct cgraph_edge *edge,
780			      struct lto_simple_output_block *ob,
781			      lto_symtab_encoder_t encoder)
782{
783  if (!edge)
784    return;
785
786  /* Output edges in backward direction, so the reconstructed callgraph match
787     and it is easy to associate call sites in the IPA pass summaries.  */
788  while (edge->next_callee)
789    edge = edge->next_callee;
790  for (; edge; edge = edge->prev_callee)
791    lto_output_edge (ob, edge, encoder);
792}
793
794/* Output the part of the cgraph in SET.  */
795
796static void
797output_refs (lto_symtab_encoder_t encoder)
798{
799  struct lto_simple_output_block *ob;
800  int count;
801  struct ipa_ref *ref;
802
803  ob = lto_create_simple_output_block (LTO_section_refs);
804
805  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
806    {
807      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
808
809      /* IPA_REF_ALIAS and IPA_REF_CHKP references are always preserved
810	 in the boundary.  Alias node can't have other references and
811	 can be always handled as if it's not in the boundary.  */
812      if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
813	{
814	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
815	  /* Output IPA_REF_CHKP reference.  */
816	  if (cnode
817	      && cnode->instrumented_version
818	      && !cnode->instrumentation_clone)
819	    {
820	      for (int i = 0; node->iterate_reference (i, ref); i++)
821		if (ref->use == IPA_REF_CHKP)
822		  {
823		    if (lto_symtab_encoder_lookup (encoder, ref->referred)
824			!= LCC_NOT_FOUND)
825		      {
826			int nref = lto_symtab_encoder_lookup (encoder, node);
827			streamer_write_gcov_count_stream (ob->main_stream, 1);
828			streamer_write_uhwi_stream (ob->main_stream, nref);
829			lto_output_ref (ob, ref, encoder);
830		      }
831		    break;
832		  }
833	    }
834	  continue;
835	}
836
837      count = node->ref_list.nreferences ();
838      if (count)
839	{
840	  streamer_write_gcov_count_stream (ob->main_stream, count);
841	  streamer_write_uhwi_stream (ob->main_stream,
842				     lto_symtab_encoder_lookup (encoder, node));
843	  for (int i = 0; node->iterate_reference (i, ref); i++)
844	    lto_output_ref (ob, ref, encoder);
845	}
846    }
847
848  streamer_write_uhwi_stream (ob->main_stream, 0);
849
850  lto_destroy_simple_output_block (ob);
851}
852
/* Add NODE into encoder as well as nodes it is cloned from.
   Insertion is ordered so that each clone master appears before its
   clones.  */
855
static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  /* Recurse to the clone master first, so it is encoded before NODE.  */
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  /* Only the root of the clone tree gets its body marked for encoding;
     lto_output_node checks encode_body_p on the ultimate clone master.  */
  else if (include_body)
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, node);
}
866
867/* Add all references in NODE to encoders.  */
868
869static void
870create_references (lto_symtab_encoder_t encoder, symtab_node *node)
871{
872  int i;
873  struct ipa_ref *ref = NULL;
874  for (i = 0; node->iterate_reference (i, ref); i++)
875    if (is_a <cgraph_node *> (ref->referred))
876      add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
877    else
878      lto_symtab_encoder_encode (encoder, ref->referred);
879}
880
881/* Select what needs to be streamed out.  In regular lto mode stream everything.
882   In offload lto mode stream only nodes marked as offloadable.  */
883void
884select_what_to_stream (void)
885{
886  struct symtab_node *snode;
887  FOR_EACH_SYMBOL (snode)
888    snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
889}
890
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  /* Cache tokens of polymorphic call target sets already expanded, so
     each distinct target set is walked only once below.  */
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER. At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      /* Skip symbols excluded by select_what_to_stream.  */
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  /* Same duplication for variables in the partition.  */
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.
     Note: lto_symtab_encoder_size may grow while we iterate because
     create_references can add further nodes; that is intended.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		   && (!DECL_VIRTUAL_P (vnode->decl)
		       || !flag_wpa
		       || flag_ltrans_devirtualize))
		  || POINTER_BOUNDS_P (vnode->decl))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
       }
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* Only expand each target set once; the cache token
		 identifies the set.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, callee))
			{
			  gcc_assert (!callee->global.inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias target and thunk callees.  These need
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk.thunk_p)
	add_node_to (encoder, cnode->callees->callee, false);
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
1018
/* Output the part of the symtab in SET and VSET.
   Writes the symtab-nodes LTO section: profile summary, then all nodes,
   then a zero terminator followed by the edges and references.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        lto_output_node (ob, cnode, encoder);
      else
	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
      if (node
	  && (node->thunk.thunk_p
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
	}
    }

  /* Zero tag terminates the node/edge stream; the reader side
     (input_cgraph_1) stops on it.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1083
1084/* Return identifier encoded in IB as a plain string.  */
1085
1086static tree
1087read_identifier (struct lto_input_block *ib)
1088{
1089  unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1090  tree id;
1091
1092  if (ib->data[ib->p + len])
1093    lto_section_overrun (ib);
1094  if (!len)
1095    {
1096      ib->p++;
1097      return NULL;
1098    }
1099  id = get_identifier (ib->data + ib->p);
1100  ib->p += len + 1;
1101  return id;
1102}
1103
1104/* Return string encoded in IB, NULL if string is empty.  */
1105
1106static const char *
1107read_string (struct lto_input_block *ib)
1108{
1109  unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1110  const char *str;
1111
1112  if (ib->data[ib->p + len])
1113    lto_section_overrun (ib);
1114  if (!len)
1115    {
1116      ib->p++;
1117      return NULL;
1118    }
1119  str = ib->data + ib->p;
1120  ib->p += len + 1;
1121  return str;
1122}
1123
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.  */

void
output_offload_tables (void)
{
  /* Nothing to do when no offloadable entities were recorded.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  /* Each function entry is a tag followed by its decl index.  */
  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  /* Likewise for variables, tagged as LTO_symtab_variable.  */
  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* Zero tag terminates the table for the reader.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1167
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   The sequence of bp_unpack_value calls below must match exactly the
   bp_pack_value sequence on the writer side; do not reorder.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it during fixup and to
     detect nodes read more than once.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* Analyzed-ness is implied by the tag, not stored in the bitpack.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives in another partition; make the decl external so
	 this unit only references it.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->instrumentation_clone = bp_unpack_value (bp, 1);
  node->split_part = bp_unpack_value (bp, 1);
  /* Cross-partition flags only make sense during LTRANS.  */
  gcc_assert (flag_ltrans
	      || (!node->in_other_partition
		  && !node->used_from_other_partition));
}
1234
1235/* Return string alias is alias of.  */
1236
1237static tree
1238get_alias_symbol (tree decl)
1239{
1240  tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1241  return get_identifier (TREE_STRING_POINTER
1242			  (TREE_VALUE (TREE_VALUE (alias))));
1243}
1244
/* Read a cgraph node from input_block IB.  TAG is the node's tag just read.
   NODES is the vector of nodes read so far, used to resolve the clone
   origin reference.  Return the node read or overwritten.

   The read order below must match the write order in lto_output_node.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are rebased so nodes from different files do not clash.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* The node is a clone; its origin was streamed earlier and is
	 already in NODES.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	0, CGRAPH_FREQ_BASE, false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Restore the list of IPA transform passes still to be applied to
     this node's body at materialization time.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes carry an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->thunk.thunk_p)
    {
      /* TYPE packs the thunk's boolean properties as bit flags.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
      node->thunk.add_pointer_bounds_args = (type & 8);
    }
  /* Unanalyzed weakrefs keep their target as an identifier taken from
     the alias attribute.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  /* Instrumentation clones (Pointer Bounds Checker) remember the decl
     of the function they were cloned from.  */
  if (node->instrumentation_clone)
    {
      decl_index = streamer_read_uhwi (ib);
      fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      node->orig_decl = fn_decl;
    }

  return node;
}
1371
1372/* Read a node from input_block IB.  TAG is the node's tag just read.
1373   Return the node read or overwriten.  */
1374
1375static varpool_node *
1376input_varpool_node (struct lto_file_decl_data *file_data,
1377		    struct lto_input_block *ib)
1378{
1379  int decl_index;
1380  tree var_decl;
1381  varpool_node *node;
1382  struct bitpack_d bp;
1383  int ref = LCC_NOT_FOUND;
1384  int order;
1385  tree group;
1386  const char *section;
1387
1388  order = streamer_read_hwi (ib) + order_base;
1389  decl_index = streamer_read_uhwi (ib);
1390  var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1391
1392  /* Declaration of functions can be already merged with a declaration
1393     from other input file.  We keep cgraph unmerged until after streaming
1394     of ipa passes is done.  Alays forcingly create a fresh node.  */
1395  node = varpool_node::create_empty ();
1396  node->decl = var_decl;
1397  node->register_symbol ();
1398
1399  node->order = order;
1400  if (order >= symtab->order)
1401    symtab->order = order + 1;
1402  node->lto_file_data = file_data;
1403
1404  bp = streamer_read_bitpack (ib);
1405  node->externally_visible = bp_unpack_value (&bp, 1);
1406  node->no_reorder = bp_unpack_value (&bp, 1);
1407  node->force_output = bp_unpack_value (&bp, 1);
1408  node->forced_by_abi = bp_unpack_value (&bp, 1);
1409  node->unique_name = bp_unpack_value (&bp, 1);
1410  node->body_removed = bp_unpack_value (&bp, 1);
1411  node->implicit_section = bp_unpack_value (&bp, 1);
1412  node->writeonly = bp_unpack_value (&bp, 1);
1413  node->definition = bp_unpack_value (&bp, 1);
1414  node->alias = bp_unpack_value (&bp, 1);
1415  node->weakref = bp_unpack_value (&bp, 1);
1416  node->analyzed = bp_unpack_value (&bp, 1);
1417  node->used_from_other_partition = bp_unpack_value (&bp, 1);
1418  node->in_other_partition = bp_unpack_value (&bp, 1);
1419  if (node->in_other_partition)
1420    {
1421      DECL_EXTERNAL (node->decl) = 1;
1422      TREE_STATIC (node->decl) = 0;
1423    }
1424  if (node->alias && !node->analyzed && node->weakref)
1425    node->alias_target = get_alias_symbol (node->decl);
1426  node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1427  node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1428  node->need_bounds_init = bp_unpack_value (&bp, 1);
1429  group = read_identifier (ib);
1430  if (group)
1431    {
1432      node->set_comdat_group (group);
1433      ref = streamer_read_hwi (ib);
1434      /* Store a reference for now, and fix up later to be a pointer.  */
1435      node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1436    }
1437  else
1438    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1439  section = read_string (ib);
1440  if (section)
1441    node->set_section_for_node (section);
1442  node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1443					        LDPR_NUM_KNOWN);
1444  gcc_assert (flag_ltrans
1445	      || (!node->in_other_partition
1446		  && !node->used_from_other_partition));
1447
1448  return node;
1449}
1450
1451/* Read a node from input_block IB.  TAG is the node's tag just read.
1452   Return the node read or overwriten.  */
1453
1454static void
1455input_ref (struct lto_input_block *ib,
1456	   symtab_node *referring_node,
1457	   vec<symtab_node *> nodes)
1458{
1459  symtab_node *node = NULL;
1460  struct bitpack_d bp;
1461  enum ipa_ref_use use;
1462  bool speculative;
1463  struct ipa_ref *ref;
1464
1465  bp = streamer_read_bitpack (ib);
1466  use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1467  speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1468  node = nodes[streamer_read_hwi (ib)];
1469  ref = referring_node->create_reference (node, use);
1470  ref->speculative = speculative;
1471  if (is_a <cgraph_node *> (referring_node))
1472    ref->lto_stmt_uid = streamer_read_hwi (ib);
1473}
1474
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).

   The read order must match the write order in lto_output_edge.  */

static void
input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  /* Direct edges stream the callee too; indirect edges have no known
     callee by definition.  */
  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count, freq);
  else
    edge = caller->create_edge (callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Reconstruct the ECF_* flags of the (unknown) call target, one
	 bit per flag in a fixed order.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      /* Common target info is only present when an id was recorded.  */
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      if (edge->indirect_info->common_target_id)
        edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
    }
}
1545
1546
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the vector
   of all symtab nodes read, in stream order, after fixing up the
   inlined_to and same_comdat_group references from indices into NODES to
   real pointers.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* Remember the order base so node orders from this file can be
     rebased past everything read so far.  */
  order_base = symtab->order;
  /* A zero tag terminates the stream (written by output_symtab).  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
        input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
        input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
        {
	  node = input_varpool_node (file_data, ib);
          nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
        }
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (nodes, i, node)
    gcc_assert (node->aux || !is_a <cgraph_node *> (node));
#endif
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* inlined_to currently holds an index into NODES (stored by
	     input_node); recover it before clearing AUX.  */
	  ref = (int) (intptr_t) cnode->global.inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->global.inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->global.inlined_to = NULL;

	  /* Compute instrumented_version.  */
	  if (cnode->instrumentation_clone)
	    {
	      gcc_assert (cnode->orig_decl);

	      cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
	      if (cnode->instrumented_version)
		{
		  /* We may have multiple nodes for a single function which
		     will be merged later.  To have a proper merge we need
		     to keep instrumentation_version reference between nodes
		     consistent: each instrumented_version reference should
		     have proper reverse reference.  Thus don't break existing
		     instrumented_version reference if it already exists.  */
		  if (cnode->instrumented_version->instrumented_version)
		    cnode->instrumented_version = NULL;
		  else
		    cnode->instrumented_version->instrumented_version = cnode;
		}

	      /* Restore decl names reference except for wrapper functions.  */
	      if (!chkp_wrap_function (cnode->orig_decl))
		{
		  tree name = DECL_ASSEMBLER_NAME (cnode->decl);
		  IDENTIFIER_TRANSPARENT_ALIAS (name) = 1;
		  TREE_CHAIN (name) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
		}
	    }
	}

      /* same_comdat_group also holds an index into NODES at this point.  */
      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark function nodes in AUX for the later body-reading pass.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1652
1653/* Input ipa_refs.  */
1654
1655static void
1656input_refs (struct lto_input_block *ib,
1657	    vec<symtab_node *> nodes)
1658{
1659  int count;
1660  int idx;
1661  while (true)
1662    {
1663      symtab_node *node;
1664      count = streamer_read_uhwi (ib);
1665      if (!count)
1666	break;
1667      idx = streamer_read_uhwi (ib);
1668      node = nodes[idx];
1669      while (count)
1670	{
1671	  input_ref (ib, node, nodes);
1672	  count--;
1673	}
1674    }
1675}
1676
1677
/* Merged whole-program profile summary; filled by merge_profile_summaries.  */
static struct gcov_ctr_summary lto_gcov_summary;

/* Input profile_info from IB.
   Reads the per-file profile summary written by output_profile_summary
   into FILE_DATA->profile_info.  A zero run count means no profile.  */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  unsigned int runs = streamer_read_uhwi (ib);
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      memset (file_data->profile_info.histogram, 0,
              sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
         histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        {
          file_data->profile_info.histogram[h_ix].num_counters
              = bp_unpack_value (&bp, 1);
        }
      /* Now read the actual counts for the flagged entries only.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
        {
          if (!file_data->profile_info.histogram[h_ix].num_counters)
            continue;

          file_data->profile_info.histogram[h_ix].num_counters
              = streamer_read_gcov_count (ib);
          file_data->profile_info.histogram[h_ix].min_value
              = streamer_read_gcov_count (ib);
          file_data->profile_info.histogram[h_ix].cum_value
              = streamer_read_gcov_count (ib);
        }
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }

}
1724
1725/* Rescale profile summaries to the same number of runs in the whole unit.  */
1726
1727static void
1728merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1729{
1730  struct lto_file_decl_data *file_data;
1731  unsigned int j, h_ix;
1732  gcov_unsigned_t max_runs = 0;
1733  struct cgraph_node *node;
1734  struct cgraph_edge *edge;
1735  gcov_type saved_sum_all = 0;
1736  gcov_ctr_summary *saved_profile_info = 0;
1737  int saved_scale = 0;
1738
1739  /* Find unit with maximal number of runs.  If we ever get serious about
1740     roundoff errors, we might also consider computing smallest common
1741     multiply.  */
1742  for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1743    if (max_runs < file_data->profile_info.runs)
1744      max_runs = file_data->profile_info.runs;
1745
1746  if (!max_runs)
1747    return;
1748
1749  /* Simple overflow check.  We probably don't need to support that many train
1750     runs. Such a large value probably imply data corruption anyway.  */
1751  if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1752    {
1753      sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1754	     INT_MAX / REG_BR_PROB_BASE);
1755      return;
1756    }
1757
1758  profile_info = &lto_gcov_summary;
1759  lto_gcov_summary.runs = max_runs;
1760  lto_gcov_summary.sum_max = 0;
1761  memset (lto_gcov_summary.histogram, 0,
1762          sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1763
1764  /* Rescale all units to the maximal number of runs.
1765     sum_max can not be easily merged, as we have no idea what files come from
1766     the same run.  We do not use the info anyway, so leave it 0.  */
1767  for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1768    if (file_data->profile_info.runs)
1769      {
1770	int scale = GCOV_COMPUTE_SCALE (max_runs,
1771                                        file_data->profile_info.runs);
1772	lto_gcov_summary.sum_max
1773            = MAX (lto_gcov_summary.sum_max,
1774                   apply_scale (file_data->profile_info.sum_max, scale));
1775	lto_gcov_summary.sum_all
1776            = MAX (lto_gcov_summary.sum_all,
1777                   apply_scale (file_data->profile_info.sum_all, scale));
1778        /* Save a pointer to the profile_info with the largest
1779           scaled sum_all and the scale for use in merging the
1780           histogram.  */
1781        if (!saved_profile_info
1782            || lto_gcov_summary.sum_all > saved_sum_all)
1783          {
1784            saved_profile_info = &file_data->profile_info;
1785            saved_sum_all = lto_gcov_summary.sum_all;
1786            saved_scale = scale;
1787          }
1788      }
1789
1790  gcc_assert (saved_profile_info);
1791
1792  /* Scale up the histogram from the profile that had the largest
1793     scaled sum_all above.  */
1794  for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1795    {
1796      /* Scale up the min value as we did the corresponding sum_all
1797         above. Use that to find the new histogram index.  */
1798      gcov_type scaled_min
1799          = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1800                         saved_scale);
1801      /* The new index may be shared with another scaled histogram entry,
1802         so we need to account for a non-zero histogram entry at new_ix.  */
1803      unsigned new_ix = gcov_histo_index (scaled_min);
1804      lto_gcov_summary.histogram[new_ix].min_value
1805          = (lto_gcov_summary.histogram[new_ix].num_counters
1806             ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1807             : scaled_min);
1808      /* Some of the scaled counter values would ostensibly need to be placed
1809         into different (larger) histogram buckets, but we keep things simple
1810         here and place the scaled cumulative counter value in the bucket
1811         corresponding to the scaled minimum counter value.  */
1812      lto_gcov_summary.histogram[new_ix].cum_value
1813          += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1814                          saved_scale);
1815      lto_gcov_summary.histogram[new_ix].num_counters
1816          += saved_profile_info->histogram[h_ix].num_counters;
1817    }
1818
1819  /* Watch roundoff errors.  */
1820  if (lto_gcov_summary.sum_max < max_runs)
1821    lto_gcov_summary.sum_max = max_runs;
1822
  /* If merging already happened at WPA time, we are done.  */
1824  if (flag_ltrans)
1825    return;
1826
1827  /* Now compute count_materialization_scale of each node.
1828     During LTRANS we already have values of count_materialization_scale
1829     computed, so just update them.  */
1830  FOR_EACH_FUNCTION (node)
1831    if (node->lto_file_data
1832	&& node->lto_file_data->profile_info.runs)
1833      {
1834	int scale;
1835
1836	scale = RDIV (node->count_materialization_scale * max_runs,
1837                      node->lto_file_data->profile_info.runs);
1838	node->count_materialization_scale = scale;
1839	if (scale < 0)
1840	  fatal_error (input_location, "Profile information in %s corrupted",
1841		       file_data->file_name);
1842
1843	if (scale == REG_BR_PROB_BASE)
1844	  continue;
1845	for (edge = node->callees; edge; edge = edge->next_callee)
1846	  edge->count = apply_scale (edge->count, scale);
1847	node->count = apply_scale (node->count, scale);
1848      }
1849}
1850
1851/* Input and merge the symtab from each of the .o files passed to
1852   lto1.  */
1853
1854void
1855input_symtab (void)
1856{
1857  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1858  struct lto_file_decl_data *file_data;
1859  unsigned int j = 0;
1860  struct cgraph_node *node;
1861
1862  while ((file_data = file_data_vec[j++]))
1863    {
1864      const char *data;
1865      size_t len;
1866      struct lto_input_block *ib;
1867      vec<symtab_node *> nodes;
1868
1869      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1870					  &data, &len);
1871      if (!ib)
1872	fatal_error (input_location,
1873		     "cannot find LTO cgraph in %s", file_data->file_name);
1874      input_profile_summary (ib, file_data);
1875      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1876      nodes = input_cgraph_1 (file_data, ib);
1877      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1878				      ib, data, len);
1879
1880      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1881					  &data, &len);
1882      if (!ib)
1883	fatal_error (input_location, "cannot find LTO section refs in %s",
1884		     file_data->file_name);
1885      input_refs (ib, nodes);
1886      lto_destroy_simple_input_block (file_data, LTO_section_refs,
1887				      ib, data, len);
1888      if (flag_ltrans)
1889	input_cgraph_opt_summary (nodes);
1890      nodes.release ();
1891    }
1892
1893  merge_profile_summaries (file_data_vec);
1894  get_working_sets ();
1895
1896
1897  /* Clear out the aux field that was used to store enough state to
1898     tell which nodes should be overwritten.  */
1899  FOR_EACH_FUNCTION (node)
1900    {
1901      /* Some nodes may have been created by cgraph_node.  This
1902	 happens when the callgraph contains nested functions.  If the
1903	 node for the parent function was never emitted to the gimple
1904	 file, cgraph_node will create a node for it when setting the
1905	 context of the nested function.  */
1906      if (node->lto_file_data)
1907	node->aux = NULL;
1908    }
1909}
1910
1911/* Input function/variable tables that will allow libgomp to look up offload
1912   target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.  */
1913
1914void
1915input_offload_tables (void)
1916{
1917  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1918  struct lto_file_decl_data *file_data;
1919  unsigned int j = 0;
1920
1921  while ((file_data = file_data_vec[j++]))
1922    {
1923      const char *data;
1924      size_t len;
1925      struct lto_input_block *ib
1926	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
1927					 &data, &len);
1928      if (!ib)
1929	continue;
1930
1931      enum LTO_symtab_tags tag
1932	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1933      while (tag)
1934	{
1935	  if (tag == LTO_symtab_unavail_node)
1936	    {
1937	      int decl_index = streamer_read_uhwi (ib);
1938	      tree fn_decl
1939		= lto_file_decl_data_get_fn_decl (file_data, decl_index);
1940	      vec_safe_push (offload_funcs, fn_decl);
1941	    }
1942	  else if (tag == LTO_symtab_variable)
1943	    {
1944	      int decl_index = streamer_read_uhwi (ib);
1945	      tree var_decl
1946		= lto_file_decl_data_get_var_decl (file_data, decl_index);
1947	      vec_safe_push (offload_vars, var_decl);
1948	    }
1949	  else
1950	    fatal_error (input_location,
1951			 "invalid offload table in %s", file_data->file_name);
1952
1953	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1954	}
1955
1956      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1957				      ib, data, len);
1958    }
1959}
1960
1961/* True when we need optimization summary for NODE.  */
1962
1963static int
1964output_cgraph_opt_summary_p (struct cgraph_node *node)
1965{
1966  return (node->clone_of
1967	  && (node->clone.tree_map
1968	      || node->clone.args_to_skip
1969	      || node->clone.combined_args_to_skip));
1970}
1971
1972/* Output optimization summary for EDGE to OB.  */
1973static void
1974output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1975			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1976{
1977}
1978
1979/* Output optimization summary for NODE to OB.  */
1980
1981static void
1982output_node_opt_summary (struct output_block *ob,
1983			 struct cgraph_node *node,
1984			 lto_symtab_encoder_t encoder)
1985{
1986  unsigned int index;
1987  bitmap_iterator bi;
1988  struct ipa_replace_map *map;
1989  struct bitpack_d bp;
1990  int i;
1991  struct cgraph_edge *e;
1992
1993  if (node->clone.args_to_skip)
1994    {
1995      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1996      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1997	streamer_write_uhwi (ob, index);
1998    }
1999  else
2000    streamer_write_uhwi (ob, 0);
2001  if (node->clone.combined_args_to_skip)
2002    {
2003      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
2004      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
2005	streamer_write_uhwi (ob, index);
2006    }
2007  else
2008    streamer_write_uhwi (ob, 0);
2009  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
2010  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
2011    {
2012      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
2013         mechanism to store function local declarations into summaries.  */
2014      gcc_assert (!map->old_tree);
2015      streamer_write_uhwi (ob, map->parm_num);
2016      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
2017      stream_write_tree (ob, map->new_tree, true);
2018      bp = bitpack_create (ob->main_stream);
2019      bp_pack_value (&bp, map->replace_p, 1);
2020      bp_pack_value (&bp, map->ref_p, 1);
2021      streamer_write_bitpack (&bp);
2022    }
2023
2024  if (lto_symtab_encoder_in_partition_p (encoder, node))
2025    {
2026      for (e = node->callees; e; e = e->next_callee)
2027	output_edge_opt_summary (ob, e);
2028      for (e = node->indirect_calls; e; e = e->next_callee)
2029	output_edge_opt_summary (ob, e);
2030    }
2031}
2032
2033/* Output optimization summaries stored in callgraph.
2034   At the moment it is the clone info structure.  */
2035
2036static void
2037output_cgraph_opt_summary (void)
2038{
2039  int i, n_nodes;
2040  lto_symtab_encoder_t encoder;
2041  struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2042  unsigned count = 0;
2043
2044  ob->symbol = NULL;
2045  encoder = ob->decl_state->symtab_node_encoder;
2046  n_nodes = lto_symtab_encoder_size (encoder);
2047  for (i = 0; i < n_nodes; i++)
2048    {
2049      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2050      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2051      if (cnode && output_cgraph_opt_summary_p (cnode))
2052	count++;
2053    }
2054  streamer_write_uhwi (ob, count);
2055  for (i = 0; i < n_nodes; i++)
2056    {
2057      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2058      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2059      if (cnode && output_cgraph_opt_summary_p (cnode))
2060	{
2061	  streamer_write_uhwi (ob, i);
2062	  output_node_opt_summary (ob, cnode, encoder);
2063	}
2064    }
2065  produce_asm (ob, NULL);
2066  destroy_output_block (ob);
2067}
2068
2069/* Input optimisation summary of EDGE.  */
2070
2071static void
2072input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
2073			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
2074{
2075}
2076
2077/* Input optimisation summary of NODE.  */
2078
2079static void
2080input_node_opt_summary (struct cgraph_node *node,
2081			struct lto_input_block *ib_main,
2082			struct data_in *data_in)
2083{
2084  int i;
2085  int count;
2086  int bit;
2087  struct bitpack_d bp;
2088  struct cgraph_edge *e;
2089
2090  count = streamer_read_uhwi (ib_main);
2091  if (count)
2092    node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
2093  for (i = 0; i < count; i++)
2094    {
2095      bit = streamer_read_uhwi (ib_main);
2096      bitmap_set_bit (node->clone.args_to_skip, bit);
2097    }
2098  count = streamer_read_uhwi (ib_main);
2099  if (count)
2100    node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
2101  for (i = 0; i < count; i++)
2102    {
2103      bit = streamer_read_uhwi (ib_main);
2104      bitmap_set_bit (node->clone.combined_args_to_skip, bit);
2105    }
2106  count = streamer_read_uhwi (ib_main);
2107  for (i = 0; i < count; i++)
2108    {
2109      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2110
2111      vec_safe_push (node->clone.tree_map, map);
2112      map->parm_num = streamer_read_uhwi (ib_main);
2113      map->old_tree = NULL;
2114      map->new_tree = stream_read_tree (ib_main, data_in);
2115      bp = streamer_read_bitpack (ib_main);
2116      map->replace_p = bp_unpack_value (&bp, 1);
2117      map->ref_p = bp_unpack_value (&bp, 1);
2118    }
2119  for (e = node->callees; e; e = e->next_callee)
2120    input_edge_opt_summary (e, ib_main);
2121  for (e = node->indirect_calls; e; e = e->next_callee)
2122    input_edge_opt_summary (e, ib_main);
2123}
2124
2125/* Read section in file FILE_DATA of length LEN with data DATA.  */
2126
2127static void
2128input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2129			  const char *data, size_t len,
2130			  vec<symtab_node *> nodes)
2131{
2132  const struct lto_function_header *header =
2133    (const struct lto_function_header *) data;
2134  const int cfg_offset = sizeof (struct lto_function_header);
2135  const int main_offset = cfg_offset + header->cfg_size;
2136  const int string_offset = main_offset + header->main_size;
2137  struct data_in *data_in;
2138  unsigned int i;
2139  unsigned int count;
2140
2141  lto_input_block ib_main ((const char *) data + main_offset,
2142			   header->main_size, file_data->mode_table);
2143
2144  data_in =
2145    lto_data_in_create (file_data, (const char *) data + string_offset,
2146			header->string_size, vNULL);
2147  count = streamer_read_uhwi (&ib_main);
2148
2149  for (i = 0; i < count; i++)
2150    {
2151      int ref = streamer_read_uhwi (&ib_main);
2152      input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2153			      &ib_main, data_in);
2154    }
2155  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2156			 len);
2157  lto_data_in_delete (data_in);
2158}
2159
2160/* Input optimization summary of cgraph.  */
2161
2162static void
2163input_cgraph_opt_summary (vec<symtab_node *> nodes)
2164{
2165  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2166  struct lto_file_decl_data *file_data;
2167  unsigned int j = 0;
2168
2169  while ((file_data = file_data_vec[j++]))
2170    {
2171      size_t len;
2172      const char *data =
2173	lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2174			      &len);
2175
2176      if (data)
2177	input_cgraph_opt_section (file_data, data, len, nodes);
2178    }
2179}
2180