1/* Write the GIMPLE representation to a file stream. 2 3 Copyright (C) 2009-2015 Free Software Foundation, Inc. 4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com> 5 Re-implemented by Diego Novillo <dnovillo@google.com> 6 7This file is part of GCC. 8 9GCC is free software; you can redistribute it and/or modify it under 10the terms of the GNU General Public License as published by the Free 11Software Foundation; either version 3, or (at your option) any later 12version. 13 14GCC is distributed in the hope that it will be useful, but WITHOUT ANY 15WARRANTY; without even the implied warranty of MERCHANTABILITY or 16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 17for more details. 18 19You should have received a copy of the GNU General Public License 20along with GCC; see the file COPYING3. If not see 21<http://www.gnu.org/licenses/>. */ 22 23#include "config.h" 24#include "system.h" 25#include "coretypes.h" 26#include "tm.h" 27#include "hash-set.h" 28#include "machmode.h" 29#include "vec.h" 30#include "double-int.h" 31#include "input.h" 32#include "alias.h" 33#include "symtab.h" 34#include "wide-int.h" 35#include "inchash.h" 36#include "tree.h" 37#include "fold-const.h" 38#include "stor-layout.h" 39#include "stringpool.h" 40#include "hashtab.h" 41#include "hard-reg-set.h" 42#include "function.h" 43#include "rtl.h" 44#include "flags.h" 45#include "statistics.h" 46#include "real.h" 47#include "fixed-value.h" 48#include "insn-config.h" 49#include "expmed.h" 50#include "dojump.h" 51#include "explow.h" 52#include "calls.h" 53#include "emit-rtl.h" 54#include "varasm.h" 55#include "stmt.h" 56#include "expr.h" 57#include "params.h" 58#include "predict.h" 59#include "dominance.h" 60#include "cfg.h" 61#include "basic-block.h" 62#include "tree-ssa-alias.h" 63#include "internal-fn.h" 64#include "gimple-expr.h" 65#include "is-a.h" 66#include "gimple.h" 67#include "gimple-iterator.h" 68#include "gimple-ssa.h" 69#include "tree-ssanames.h" 70#include 
"tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "lto-symtab.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "gimple-streamer.h"
#include "tree-streamer.h"
#include "streamer-hooks.h"
#include "cfgloop.h"
#include "builtins.h"
#include "gomp-constants.h"


/* Forward declaration; used by lto_output_tree_1 before its
   definition below.  */
static void lto_write_tree (struct output_block *, tree, bool);

/* Clear the line info stored in OB.  Locations are streamed as deltas
   against the previously emitted file/line/column (see
   lto_output_location), so this forces the next location to be
   written out in full.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}


/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_static_initializer.
   The caller owns the result and must release it with
   destroy_output_block.  */

struct output_block *
create_output_block (enum lto_section_type section_type)
{
  struct output_block *ob = XCNEW (struct output_block);

  ob->section_type = section_type;
  ob->decl_state = lto_get_out_decl_state ();
  ob->main_stream = XCNEW (struct lto_output_stream);
  ob->string_stream = XCNEW (struct lto_output_stream);
  /* Tree hashes are only needed when the trees will be merged on the
     reading side; when writing at WPA time (for LTRANS) no merging is
     performed, hence !flag_wpa (see the hash computation guard in
     DFS::DFS).  */
  ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);

  /* The CFG stream only exists for function bodies; it stays NULL for
     static initializer sections.  */
  if (section_type == LTO_section_function_body)
    ob->cfg_stream = XCNEW (struct lto_output_stream);

  clear_line_info (ob);

  ob->string_hash_table = new hash_table<string_slot_hasher> (37);
  gcc_obstack_init (&ob->obstack);

  return ob;
}


/* Destroy the output block OB.
 */

void
destroy_output_block (struct output_block *ob)
{
  enum lto_section_type section_type = ob->section_type;

  delete ob->string_hash_table;
  ob->string_hash_table = NULL;

  free (ob->main_stream);
  free (ob->string_stream);
  /* The CFG stream is only allocated for function-body sections
     (see create_output_block).  */
  if (section_type == LTO_section_function_body)
    free (ob->cfg_stream);

  streamer_tree_cache_delete (ob->writer_cache);
  obstack_free (&ob->obstack, NULL);

  free (ob);
}


/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}


/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  /* Function-local declarations are streamed with the body that
     contains them, never via the global tables.  */
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}


/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  /* One bit: is there any location at all?  */
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  /* Delta encoding: one "changed" bit per field, followed by only the
     fields that differ from the previously streamed location (tracked
     in OB; see clear_line_info).  Note the file name is compared by
     pointer identity.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_string (ob, bp, xloc.file, true);
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}


/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.
 */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types are referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only; their bodies
	 are never streamed here (see lto_is_streamable).  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be indexed; locals are
	 streamed with their function body (see tree_is_indexable).  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* Fall through.  */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}


/* Return true if EXPR is a tree node that can be written to disk.  */

static inline bool
lto_is_streamable (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
     name version in lto_output_tree_ref (see output_ssa_names).
     Statements other than CASE_LABEL_EXPR and DECL_EXPR are likewise
     rejected: gimplification should have removed them.  */
  return !is_lang_specific (expr)
	 && code != SSA_NAME
	 && code != CALL_EXPR
	 && code != LANG_TYPE
	 && code != MODIFY_EXPR
	 && code != INIT_EXPR
	 && code != TARGET_EXPR
	 && code != BIND_EXPR
	 && code != WITH_CLEANUP_EXPR
	 && code != STATEMENT_LIST
	 && (code == CASE_LABEL_EXPR
	     || code == DECL_EXPR
	     || TREE_CODE_CLASS (code) != tcc_statement);
}


/* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.
   Returns error_mark_node when the real initializer is not streamed
   here (it is then emitted in a separate section, or dropped).  */

static tree
get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
{
  gcc_checking_assert (DECL_P (expr)
		       && TREE_CODE (expr) != FUNCTION_DECL
		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);

  /* Handle DECL_INITIAL for symbols.  */
  tree initial = DECL_INITIAL (expr);
  if (TREE_CODE (expr) == VAR_DECL
      && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
      && !DECL_IN_CONSTANT_POOL (expr)
      && initial)
    {
      varpool_node *vnode;
      /* Extra section needs about 30 bytes; do not produce it for simple
	 scalar values.  */
      if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
	  || !(vnode = varpool_node::get (expr))
	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
	initial = error_mark_node;
    }

  return initial;
}


/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}

/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}

/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is EXPR's hash value, recorded in the writer cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* An indexable node must have been emitted as a reference instead
     (see lto_output_tree_ref).  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
	 completely as they are always instantiated by the
	 compiler on startup.  The only builtins that need to
	 be written out are BUILT_IN_FRONTEND.  For all other
	 builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
	   && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}

/* Iterative DFS walk over a tree graph that discovers strongly
   connected components (Tarjan) and streams each SCC as an
   LTO_tree_scc record.  Constructing a DFS object performs the whole
   walk and does all the streaming.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* A tree together with its (possibly not yet computed) hash.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of entries of the SCC currently being discovered.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node Tarjan state: DFS discovery number and low-link.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* One pending edge of the explicit (non-recursive) DFS.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size);

  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};

/* Walk the tree graph rooted at EXPR, streaming each completed SCC.
   REF_P/THIS_REF_P are as for lto_output_tree_1.  If SINGLE_P, a
   re-walked leaf SCC is merely popped so the caller can inspect
   sccstack.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First visit of this worklist entry.  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      /* EXPR was already visited: just update the caller's
		 low-link from its state (Tarjan back-edge case).  */
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  if (streamer_handle_as_builtin_p (expr))
	    /* Builtins have no edges to walk; only class and code are
	       streamed (see lto_output_tree_1).  */
	    ;
	  else if (TREE_CODE (expr) == INTEGER_CST
		   && !TREE_OVERFLOW (expr))
	    /* Only the type edge matters for shared INTEGER_CSTs.  */
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* Second visit: all children processed.  See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf-SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  /* Find the run [from, i) of entries sharing one hash.  */
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the chosen run to the front of the SCC.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		{
		  scc_entry tem = sccstack[first + i];
		  sccstack[first + i] = sccstack[first + entry_start + i];
		  sccstack[first + entry_start + i] = tem;
		}

	      if (scc_entry_len == 1)
		; /* We already sorted SCC deterministically in hash_scc.  */
	      else
		/* Check that we have only one SCC.
		   Naturally we may have conflicts if hash function is not
		   strong enough.  Lets see how far this gets.  */
		{
#ifdef ENABLE_CHECKING
		  gcc_unreachable ();
#endif
		}
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root: propagate the low-link to the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}

/* Release the DFS walk's auxiliary storage.  Note the per-node sccs
   records live on sccstate_obstack and are freed in one go.  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}

/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.
*/ 717 718void 719DFS::DFS_write_tree_body (struct output_block *ob, 720 tree expr, sccs *expr_state, bool ref_p) 721{ 722#define DFS_follow_tree_edge(DEST) \ 723 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p) 724 725 enum tree_code code; 726 727 code = TREE_CODE (expr); 728 729 if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) 730 { 731 if (TREE_CODE (expr) != IDENTIFIER_NODE) 732 DFS_follow_tree_edge (TREE_TYPE (expr)); 733 } 734 735 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR)) 736 { 737 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i) 738 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i)); 739 } 740 741 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX)) 742 { 743 DFS_follow_tree_edge (TREE_REALPART (expr)); 744 DFS_follow_tree_edge (TREE_IMAGPART (expr)); 745 } 746 747 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL)) 748 { 749 /* Drop names that were created for anonymous entities. */ 750 if (DECL_NAME (expr) 751 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE 752 && ANON_AGGRNAME_P (DECL_NAME (expr))) 753 ; 754 else 755 DFS_follow_tree_edge (DECL_NAME (expr)); 756 DFS_follow_tree_edge (DECL_CONTEXT (expr)); 757 } 758 759 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) 760 { 761 DFS_follow_tree_edge (DECL_SIZE (expr)); 762 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr)); 763 764 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs 765 special handling in LTO, it must be handled by streamer hooks. */ 766 767 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr)); 768 769 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information 770 for early inlining so drop it on the floor instead of ICEing in 771 dwarf2out.c. 
*/ 772 773 if ((TREE_CODE (expr) == VAR_DECL 774 || TREE_CODE (expr) == PARM_DECL) 775 && DECL_HAS_VALUE_EXPR_P (expr)) 776 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr)); 777 if (TREE_CODE (expr) == VAR_DECL) 778 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr)); 779 } 780 781 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON)) 782 { 783 if (TREE_CODE (expr) == TYPE_DECL) 784 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr)); 785 } 786 787 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) 788 { 789 /* Make sure we don't inadvertently set the assembler name. */ 790 if (DECL_ASSEMBLER_NAME_SET_P (expr)) 791 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr)); 792 } 793 794 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL)) 795 { 796 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr)); 797 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr)); 798 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr)); 799 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr)); 800 DFS_follow_tree_edge (DECL_FCONTEXT (expr)); 801 } 802 803 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL)) 804 { 805 DFS_follow_tree_edge (DECL_VINDEX (expr)); 806 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr)); 807 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr)); 808 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr)); 809 } 810 811 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON)) 812 { 813 DFS_follow_tree_edge (TYPE_SIZE (expr)); 814 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr)); 815 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr)); 816 DFS_follow_tree_edge (TYPE_NAME (expr)); 817 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be 818 reconstructed during fixup. */ 819 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists 820 during fixup. */ 821 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr)); 822 DFS_follow_tree_edge (TYPE_CONTEXT (expr)); 823 /* TYPE_CANONICAL is re-computed during type merging, so no need 824 to follow it here. 
*/ 825 DFS_follow_tree_edge (TYPE_STUB_DECL (expr)); 826 } 827 828 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON)) 829 { 830 if (TREE_CODE (expr) == ENUMERAL_TYPE) 831 DFS_follow_tree_edge (TYPE_VALUES (expr)); 832 else if (TREE_CODE (expr) == ARRAY_TYPE) 833 DFS_follow_tree_edge (TYPE_DOMAIN (expr)); 834 else if (RECORD_OR_UNION_TYPE_P (expr)) 835 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t)) 836 DFS_follow_tree_edge (t); 837 else if (TREE_CODE (expr) == FUNCTION_TYPE 838 || TREE_CODE (expr) == METHOD_TYPE) 839 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr)); 840 841 if (!POINTER_TYPE_P (expr)) 842 DFS_follow_tree_edge (TYPE_MINVAL (expr)); 843 DFS_follow_tree_edge (TYPE_MAXVAL (expr)); 844 if (RECORD_OR_UNION_TYPE_P (expr)) 845 DFS_follow_tree_edge (TYPE_BINFO (expr)); 846 } 847 848 if (CODE_CONTAINS_STRUCT (code, TS_LIST)) 849 { 850 DFS_follow_tree_edge (TREE_PURPOSE (expr)); 851 DFS_follow_tree_edge (TREE_VALUE (expr)); 852 DFS_follow_tree_edge (TREE_CHAIN (expr)); 853 } 854 855 if (CODE_CONTAINS_STRUCT (code, TS_VEC)) 856 { 857 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++) 858 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i)); 859 } 860 861 if (CODE_CONTAINS_STRUCT (code, TS_EXP)) 862 { 863 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++) 864 DFS_follow_tree_edge (TREE_OPERAND (expr, i)); 865 DFS_follow_tree_edge (TREE_BLOCK (expr)); 866 } 867 868 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK)) 869 { 870 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t)) 871 if (VAR_OR_FUNCTION_DECL_P (t) 872 && DECL_EXTERNAL (t)) 873 /* We have to stream externals in the block chain as 874 non-references. See also 875 tree-streamer-out.c:streamer_write_chain. */ 876 DFS_write_tree (ob, expr_state, t, ref_p, false); 877 else 878 DFS_follow_tree_edge (t); 879 880 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr)); 881 882 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can 883 handle - those that represent inlined function scopes. 
884 For the drop rest them on the floor instead of ICEing 885 in dwarf2out.c. */ 886 if (inlined_function_outer_scope_p (expr)) 887 { 888 tree ultimate_origin = block_ultimate_origin (expr); 889 DFS_follow_tree_edge (ultimate_origin); 890 } 891 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug 892 information for early inlined BLOCKs so drop it on the floor instead 893 of ICEing in dwarf2out.c. */ 894 895 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO 896 streaming time. */ 897 898 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this 899 list is re-constructed from BLOCK_SUPERCONTEXT. */ 900 } 901 902 if (CODE_CONTAINS_STRUCT (code, TS_BINFO)) 903 { 904 unsigned i; 905 tree t; 906 907 /* Note that the number of BINFO slots has already been emitted in 908 EXPR's header (see streamer_write_tree_header) because this length 909 is needed to build the empty BINFO node on the reader side. */ 910 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t) 911 DFS_follow_tree_edge (t); 912 DFS_follow_tree_edge (BINFO_OFFSET (expr)); 913 DFS_follow_tree_edge (BINFO_VTABLE (expr)); 914 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr)); 915 916 /* The number of BINFO_BASE_ACCESSES has already been emitted in 917 EXPR's bitfield section. */ 918 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t) 919 DFS_follow_tree_edge (t); 920 921 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX 922 and BINFO_VPTR_INDEX; these are used by C++ FE only. 
*/ 923 } 924 925 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR)) 926 { 927 unsigned i; 928 tree index, value; 929 930 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value) 931 { 932 DFS_follow_tree_edge (index); 933 DFS_follow_tree_edge (value); 934 } 935 } 936 937 if (code == OMP_CLAUSE) 938 { 939 int i; 940 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++) 941 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i)); 942 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr)); 943 } 944 945#undef DFS_follow_tree_edge 946} 947 948/* Return a hash value for the tree T. 949 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL, 950 may hold hash values if trees inside current SCC. */ 951 952static hashval_t 953hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t) 954{ 955 inchash::hash hstate; 956 957#define visit(SIBLING) \ 958 do { \ 959 unsigned ix; \ 960 if (!SIBLING) \ 961 hstate.add_int (0); \ 962 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \ 963 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \ 964 else if (map) \ 965 hstate.add_int (*map->get (SIBLING)); \ 966 else \ 967 hstate.add_int (1); \ 968 } while (0) 969 970 /* Hash TS_BASE. 
*/ 971 enum tree_code code = TREE_CODE (t); 972 hstate.add_int (code); 973 if (!TYPE_P (t)) 974 { 975 hstate.add_flag (TREE_SIDE_EFFECTS (t)); 976 hstate.add_flag (TREE_CONSTANT (t)); 977 hstate.add_flag (TREE_READONLY (t)); 978 hstate.add_flag (TREE_PUBLIC (t)); 979 } 980 hstate.add_flag (TREE_ADDRESSABLE (t)); 981 hstate.add_flag (TREE_THIS_VOLATILE (t)); 982 if (DECL_P (t)) 983 hstate.add_flag (DECL_UNSIGNED (t)); 984 else if (TYPE_P (t)) 985 hstate.add_flag (TYPE_UNSIGNED (t)); 986 if (TYPE_P (t)) 987 hstate.add_flag (TYPE_ARTIFICIAL (t)); 988 else 989 hstate.add_flag (TREE_NO_WARNING (t)); 990 hstate.add_flag (TREE_NOTHROW (t)); 991 hstate.add_flag (TREE_STATIC (t)); 992 hstate.add_flag (TREE_PROTECTED (t)); 993 hstate.add_flag (TREE_DEPRECATED (t)); 994 if (code != TREE_BINFO) 995 hstate.add_flag (TREE_PRIVATE (t)); 996 if (TYPE_P (t)) 997 { 998 hstate.add_flag (TYPE_SATURATING (t)); 999 hstate.add_flag (TYPE_ADDR_SPACE (t)); 1000 } 1001 else if (code == SSA_NAME) 1002 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t)); 1003 hstate.commit_flag (); 1004 1005 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST)) 1006 { 1007 int i; 1008 hstate.add_wide_int (TREE_INT_CST_NUNITS (t)); 1009 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t)); 1010 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++) 1011 hstate.add_wide_int (TREE_INT_CST_ELT (t, i)); 1012 } 1013 1014 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST)) 1015 { 1016 REAL_VALUE_TYPE r = TREE_REAL_CST (t); 1017 hstate.add_flag (r.cl); 1018 hstate.add_flag (r.sign); 1019 hstate.add_flag (r.signalling); 1020 hstate.add_flag (r.canonical); 1021 hstate.commit_flag (); 1022 hstate.add_int (r.uexp); 1023 hstate.add (r.sig, sizeof (r.sig)); 1024 } 1025 1026 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST)) 1027 { 1028 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t); 1029 hstate.add_int (f.mode); 1030 hstate.add_int (f.data.low); 1031 hstate.add_int (f.data.high); 1032 } 1033 1034 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) 1035 { 1036 
hstate.add_wide_int (DECL_MODE (t)); 1037 hstate.add_flag (DECL_NONLOCAL (t)); 1038 hstate.add_flag (DECL_VIRTUAL_P (t)); 1039 hstate.add_flag (DECL_IGNORED_P (t)); 1040 hstate.add_flag (DECL_ABSTRACT_P (t)); 1041 hstate.add_flag (DECL_ARTIFICIAL (t)); 1042 hstate.add_flag (DECL_USER_ALIGN (t)); 1043 hstate.add_flag (DECL_PRESERVE_P (t)); 1044 hstate.add_flag (DECL_EXTERNAL (t)); 1045 hstate.add_flag (DECL_GIMPLE_REG_P (t)); 1046 hstate.commit_flag (); 1047 hstate.add_int (DECL_ALIGN (t)); 1048 if (code == LABEL_DECL) 1049 { 1050 hstate.add_int (EH_LANDING_PAD_NR (t)); 1051 hstate.add_int (LABEL_DECL_UID (t)); 1052 } 1053 else if (code == FIELD_DECL) 1054 { 1055 hstate.add_flag (DECL_PACKED (t)); 1056 hstate.add_flag (DECL_NONADDRESSABLE_P (t)); 1057 hstate.add_int (DECL_OFFSET_ALIGN (t)); 1058 } 1059 else if (code == VAR_DECL) 1060 { 1061 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t)); 1062 hstate.add_flag (DECL_NONLOCAL_FRAME (t)); 1063 } 1064 if (code == RESULT_DECL 1065 || code == PARM_DECL 1066 || code == VAR_DECL) 1067 { 1068 hstate.add_flag (DECL_BY_REFERENCE (t)); 1069 if (code == VAR_DECL 1070 || code == PARM_DECL) 1071 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t)); 1072 } 1073 hstate.commit_flag (); 1074 } 1075 1076 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL)) 1077 hstate.add_int (DECL_REGISTER (t)); 1078 1079 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) 1080 { 1081 hstate.add_flag (DECL_COMMON (t)); 1082 hstate.add_flag (DECL_DLLIMPORT_P (t)); 1083 hstate.add_flag (DECL_WEAK (t)); 1084 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t)); 1085 hstate.add_flag (DECL_COMDAT (t)); 1086 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t)); 1087 hstate.add_int (DECL_VISIBILITY (t)); 1088 if (code == VAR_DECL) 1089 { 1090 /* DECL_IN_TEXT_SECTION is set during final asm output only. 
*/ 1091 hstate.add_flag (DECL_HARD_REGISTER (t)); 1092 hstate.add_flag (DECL_IN_CONSTANT_POOL (t)); 1093 } 1094 if (TREE_CODE (t) == FUNCTION_DECL) 1095 { 1096 hstate.add_flag (DECL_FINAL_P (t)); 1097 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t)); 1098 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t)); 1099 } 1100 hstate.commit_flag (); 1101 } 1102 1103 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL)) 1104 { 1105 hstate.add_int (DECL_BUILT_IN_CLASS (t)); 1106 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t)); 1107 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t)); 1108 hstate.add_flag (DECL_UNINLINABLE (t)); 1109 hstate.add_flag (DECL_POSSIBLY_INLINED (t)); 1110 hstate.add_flag (DECL_IS_NOVOPS (t)); 1111 hstate.add_flag (DECL_IS_RETURNS_TWICE (t)); 1112 hstate.add_flag (DECL_IS_MALLOC (t)); 1113 hstate.add_flag (DECL_IS_OPERATOR_NEW (t)); 1114 hstate.add_flag (DECL_DECLARED_INLINE_P (t)); 1115 hstate.add_flag (DECL_STATIC_CHAIN (t)); 1116 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t)); 1117 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t)); 1118 hstate.add_flag (DECL_NO_LIMIT_STACK (t)); 1119 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t)); 1120 hstate.add_flag (DECL_PURE_P (t)); 1121 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t)); 1122 hstate.commit_flag (); 1123 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN) 1124 hstate.add_int (DECL_FUNCTION_CODE (t)); 1125 } 1126 1127 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON)) 1128 { 1129 hstate.add_wide_int (TYPE_MODE (t)); 1130 hstate.add_flag (TYPE_STRING_FLAG (t)); 1131 hstate.add_flag (TYPE_NO_FORCE_BLK (t)); 1132 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t)); 1133 hstate.add_flag (TYPE_PACKED (t)); 1134 hstate.add_flag (TYPE_RESTRICT (t)); 1135 hstate.add_flag (TYPE_USER_ALIGN (t)); 1136 hstate.add_flag (TYPE_READONLY (t)); 1137 if (RECORD_OR_UNION_TYPE_P (t)) 1138 { 1139 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t)); 1140 hstate.add_flag (TYPE_FINAL_P (t)); 1141 } 1142 else if (code == ARRAY_TYPE) 1143 
hstate.add_flag (TYPE_NONALIASED_COMPONENT (t)); 1144 hstate.commit_flag (); 1145 hstate.add_int (TYPE_PRECISION (t)); 1146 hstate.add_int (TYPE_ALIGN (t)); 1147 hstate.add_int ((TYPE_ALIAS_SET (t) == 0 1148 || (!in_lto_p 1149 && get_alias_set (t) == 0)) 1150 ? 0 : -1); 1151 } 1152 1153 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL)) 1154 hstate.add (TRANSLATION_UNIT_LANGUAGE (t), 1155 strlen (TRANSLATION_UNIT_LANGUAGE (t))); 1156 1157 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION) 1158 /* We don't stream these when passing things to a different target. */ 1159 && !lto_stream_offload_p) 1160 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t))); 1161 1162 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION)) 1163 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t))); 1164 1165 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER)) 1166 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t)); 1167 1168 if (CODE_CONTAINS_STRUCT (code, TS_STRING)) 1169 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t)); 1170 1171 if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) 1172 { 1173 if (code != IDENTIFIER_NODE) 1174 visit (TREE_TYPE (t)); 1175 } 1176 1177 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR)) 1178 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i) 1179 visit (VECTOR_CST_ELT (t, i)); 1180 1181 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX)) 1182 { 1183 visit (TREE_REALPART (t)); 1184 visit (TREE_IMAGPART (t)); 1185 } 1186 1187 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL)) 1188 { 1189 /* Drop names that were created for anonymous entities. 
*/ 1190 if (DECL_NAME (t) 1191 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE 1192 && ANON_AGGRNAME_P (DECL_NAME (t))) 1193 ; 1194 else 1195 visit (DECL_NAME (t)); 1196 if (DECL_FILE_SCOPE_P (t)) 1197 ; 1198 else 1199 visit (DECL_CONTEXT (t)); 1200 } 1201 1202 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) 1203 { 1204 visit (DECL_SIZE (t)); 1205 visit (DECL_SIZE_UNIT (t)); 1206 visit (DECL_ATTRIBUTES (t)); 1207 if ((code == VAR_DECL 1208 || code == PARM_DECL) 1209 && DECL_HAS_VALUE_EXPR_P (t)) 1210 visit (DECL_VALUE_EXPR (t)); 1211 if (code == VAR_DECL 1212 && DECL_HAS_DEBUG_EXPR_P (t)) 1213 visit (DECL_DEBUG_EXPR (t)); 1214 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to 1215 be able to call get_symbol_initial_value. */ 1216 } 1217 1218 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON)) 1219 { 1220 if (code == TYPE_DECL) 1221 visit (DECL_ORIGINAL_TYPE (t)); 1222 } 1223 1224 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) 1225 { 1226 if (DECL_ASSEMBLER_NAME_SET_P (t)) 1227 visit (DECL_ASSEMBLER_NAME (t)); 1228 } 1229 1230 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL)) 1231 { 1232 visit (DECL_FIELD_OFFSET (t)); 1233 visit (DECL_BIT_FIELD_TYPE (t)); 1234 visit (DECL_BIT_FIELD_REPRESENTATIVE (t)); 1235 visit (DECL_FIELD_BIT_OFFSET (t)); 1236 visit (DECL_FCONTEXT (t)); 1237 } 1238 1239 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL)) 1240 { 1241 visit (DECL_VINDEX (t)); 1242 visit (DECL_FUNCTION_PERSONALITY (t)); 1243 visit (DECL_FUNCTION_SPECIFIC_TARGET (t)); 1244 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t)); 1245 } 1246 1247 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON)) 1248 { 1249 visit (TYPE_SIZE (t)); 1250 visit (TYPE_SIZE_UNIT (t)); 1251 visit (TYPE_ATTRIBUTES (t)); 1252 visit (TYPE_NAME (t)); 1253 visit (TYPE_MAIN_VARIANT (t)); 1254 if (TYPE_FILE_SCOPE_P (t)) 1255 ; 1256 else 1257 visit (TYPE_CONTEXT (t)); 1258 visit (TYPE_STUB_DECL (t)); 1259 } 1260 1261 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON)) 1262 { 1263 if (code 
== ENUMERAL_TYPE) 1264 visit (TYPE_VALUES (t)); 1265 else if (code == ARRAY_TYPE) 1266 visit (TYPE_DOMAIN (t)); 1267 else if (RECORD_OR_UNION_TYPE_P (t)) 1268 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f)) 1269 visit (f); 1270 else if (code == FUNCTION_TYPE 1271 || code == METHOD_TYPE) 1272 visit (TYPE_ARG_TYPES (t)); 1273 if (!POINTER_TYPE_P (t)) 1274 visit (TYPE_MINVAL (t)); 1275 visit (TYPE_MAXVAL (t)); 1276 if (RECORD_OR_UNION_TYPE_P (t)) 1277 visit (TYPE_BINFO (t)); 1278 } 1279 1280 if (CODE_CONTAINS_STRUCT (code, TS_LIST)) 1281 { 1282 visit (TREE_PURPOSE (t)); 1283 visit (TREE_VALUE (t)); 1284 visit (TREE_CHAIN (t)); 1285 } 1286 1287 if (CODE_CONTAINS_STRUCT (code, TS_VEC)) 1288 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i) 1289 visit (TREE_VEC_ELT (t, i)); 1290 1291 if (CODE_CONTAINS_STRUCT (code, TS_EXP)) 1292 { 1293 hstate.add_wide_int (TREE_OPERAND_LENGTH (t)); 1294 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i) 1295 visit (TREE_OPERAND (t, i)); 1296 } 1297 1298 if (CODE_CONTAINS_STRUCT (code, TS_BINFO)) 1299 { 1300 unsigned i; 1301 tree b; 1302 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b) 1303 visit (b); 1304 visit (BINFO_OFFSET (t)); 1305 visit (BINFO_VTABLE (t)); 1306 visit (BINFO_VPTR_FIELD (t)); 1307 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b) 1308 visit (b); 1309 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX 1310 and BINFO_VPTR_INDEX; these are used by C++ FE only. 
 */
    }

  /* Hash constructor elements, visiting both the indexes and the
     values.  */
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      HOST_WIDE_INT val;

      hstate.add_wide_int (OMP_CLAUSE_CODE (t));
      /* Mix in the clause sub-kind; clause codes without a sub-kind
	 contribute zero.  */
      switch (OMP_CLAUSE_CODE (t))
	{
	case OMP_CLAUSE_DEFAULT:
	  val = OMP_CLAUSE_DEFAULT_KIND (t);
	  break;
	case OMP_CLAUSE_SCHEDULE:
	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
	  break;
	case OMP_CLAUSE_DEPEND:
	  val = OMP_CLAUSE_DEPEND_KIND (t);
	  break;
	case OMP_CLAUSE_MAP:
	  val = OMP_CLAUSE_MAP_KIND (t);
	  break;
	case OMP_CLAUSE_PROC_BIND:
	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
	  break;
	case OMP_CLAUSE_REDUCTION:
	  val = OMP_CLAUSE_REDUCTION_CODE (t);
	  break;
	default:
	  val = 0;
	  break;
	}
      hstate.add_wide_int (val);
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
	visit (OMP_CLAUSE_OPERAND (t, i));
      visit (OMP_CLAUSE_CHAIN (t));
    }

  return hstate.end ();

#undef visit
}

/* Compare two SCC entries by their hash value for qsorting them.  */

int
DFS::scc_entry_compare (const void *p1_, const void *p2_)
{
  const scc_entry *p1 = (const scc_entry *) p1_;
  const scc_entry *p2 = (const scc_entry *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}

/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  The hash must be entry-point independent so that
   identical SCCs streamed from different translation units compare
   equal at WPA time.  */

hashval_t
DFS::hash_scc (struct output_block *ob,
	       unsigned first, unsigned size)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.
 */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (ob->writer_cache, NULL,
					sccstack[first+i].t);

  /* A singleton SCC needs no entry-point-independence machinery.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute scc hash
     by combining individual hash values in an increasing order.

     If there are duplicates we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain stable
     order by DFS walk starting from this unique tree and then use index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for type they point and assume that these
     types are also part of the SCC.
     The propagation will add the points-to type information into their hash
     values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find tree with lowest unique hash (if it exists) and compute
	 number of equivalence classes.
 */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found tree with unique hash; stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-do the DFS walk rooted at the unique entry; it yields
		 an entry-point-independent ordering of the SCC.  */
	      DFS again (ob, sccstack[first+firstunique].t, false, false, true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the scc_hash by mixing in all hash values in the
		 stable order we obtained.
 */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash = iterative_hash_hashval_t (scc_hash,
						       sccstack[first+i].hash);
		}
	    }
	  /* If we got unique hash values for each tree, then sort already
	     ensured entry point independent order.  Only compute the final
	     scc hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash = iterative_hash_hashval_t (scc_hash,
						     sccstack[first+i].hash);
	      /* We can not 100% guarantee that the hash will not conflict
		 in a way so the unique hash is not found.  This however
		 should be extremely rare situation.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs iteratively hash the whole SCC
	     hash into the hash of each of the elements.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      {
	hash_map <tree, hashval_t> map(size*2);
	for (unsigned i = 0; i < size; ++i)
	  map.put (sccstack[first+i].t, sccstack[first+i].hash);

	for (unsigned i = 0; i < size; i++)
	  sccstack[first+i].hash = hash_tree (ob->writer_cache, &map,
					      sccstack[first+i].t);
      }
    }
  while (true);
}

/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.
   Main routine called for
   each visit of EXPR.  */

void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p)
{
  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
    return;

  /* Queue EXPR for processing; the actual walk is driven from the
     worklist, not by recursion.  */
  worklist w;
  w.expr = expr;
  w.from_state = from_state;
  w.cstate = NULL;
  w.ref_p = ref_p;
  w.this_ref_p = this_ref_p;
  worklist_vec.safe_push (w);
}


/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are emitted as out-of-band references, never
     pickled inline.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}


/* Output to OB a list of try/catch handlers starting with FIRST.  */

static void
output_eh_try_list (struct output_block *ob, eh_catch first)
{
  eh_catch n;

  for (n = first; n; n = n->next_catch)
    {
      streamer_write_record_start (ob, LTO_eh_catch);
      stream_write_tree (ob, n->type_list, true);
      stream_write_tree (ob, n->filter_list, true);
      stream_write_tree (ob, n->label, true);
    }

  /* An LTO_null record terminates the handler list.  */
  streamer_write_record_start (ob, LTO_null);
}


/* Output EH region R to OB.  A NULL region is emitted as a single
   LTO_null record.
*/ 1648 1649static void 1650output_eh_region (struct output_block *ob, eh_region r) 1651{ 1652 enum LTO_tags tag; 1653 1654 if (r == NULL) 1655 { 1656 streamer_write_record_start (ob, LTO_null); 1657 return; 1658 } 1659 1660 if (r->type == ERT_CLEANUP) 1661 tag = LTO_ert_cleanup; 1662 else if (r->type == ERT_TRY) 1663 tag = LTO_ert_try; 1664 else if (r->type == ERT_ALLOWED_EXCEPTIONS) 1665 tag = LTO_ert_allowed_exceptions; 1666 else if (r->type == ERT_MUST_NOT_THROW) 1667 tag = LTO_ert_must_not_throw; 1668 else 1669 gcc_unreachable (); 1670 1671 streamer_write_record_start (ob, tag); 1672 streamer_write_hwi (ob, r->index); 1673 1674 if (r->outer) 1675 streamer_write_hwi (ob, r->outer->index); 1676 else 1677 streamer_write_zero (ob); 1678 1679 if (r->inner) 1680 streamer_write_hwi (ob, r->inner->index); 1681 else 1682 streamer_write_zero (ob); 1683 1684 if (r->next_peer) 1685 streamer_write_hwi (ob, r->next_peer->index); 1686 else 1687 streamer_write_zero (ob); 1688 1689 if (r->type == ERT_TRY) 1690 { 1691 output_eh_try_list (ob, r->u.eh_try.first_catch); 1692 } 1693 else if (r->type == ERT_ALLOWED_EXCEPTIONS) 1694 { 1695 stream_write_tree (ob, r->u.allowed.type_list, true); 1696 stream_write_tree (ob, r->u.allowed.label, true); 1697 streamer_write_uhwi (ob, r->u.allowed.filter); 1698 } 1699 else if (r->type == ERT_MUST_NOT_THROW) 1700 { 1701 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true); 1702 bitpack_d bp = bitpack_create (ob->main_stream); 1703 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc); 1704 streamer_write_bitpack (&bp); 1705 } 1706 1707 if (r->landing_pads) 1708 streamer_write_hwi (ob, r->landing_pads->index); 1709 else 1710 streamer_write_zero (ob); 1711} 1712 1713 1714/* Output landing pad LP to OB. 
*/ 1715 1716static void 1717output_eh_lp (struct output_block *ob, eh_landing_pad lp) 1718{ 1719 if (lp == NULL) 1720 { 1721 streamer_write_record_start (ob, LTO_null); 1722 return; 1723 } 1724 1725 streamer_write_record_start (ob, LTO_eh_landing_pad); 1726 streamer_write_hwi (ob, lp->index); 1727 if (lp->next_lp) 1728 streamer_write_hwi (ob, lp->next_lp->index); 1729 else 1730 streamer_write_zero (ob); 1731 1732 if (lp->region) 1733 streamer_write_hwi (ob, lp->region->index); 1734 else 1735 streamer_write_zero (ob); 1736 1737 stream_write_tree (ob, lp->post_landing_pad, true); 1738} 1739 1740 1741/* Output the existing eh_table to OB. */ 1742 1743static void 1744output_eh_regions (struct output_block *ob, struct function *fn) 1745{ 1746 if (fn->eh && fn->eh->region_tree) 1747 { 1748 unsigned i; 1749 eh_region eh; 1750 eh_landing_pad lp; 1751 tree ttype; 1752 1753 streamer_write_record_start (ob, LTO_eh_table); 1754 1755 /* Emit the index of the root of the EH region tree. */ 1756 streamer_write_hwi (ob, fn->eh->region_tree->index); 1757 1758 /* Emit all the EH regions in the region array. */ 1759 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array)); 1760 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh) 1761 output_eh_region (ob, eh); 1762 1763 /* Emit all landing pads. */ 1764 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array)); 1765 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp) 1766 output_eh_lp (ob, lp); 1767 1768 /* Emit all the runtime type data. */ 1769 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data)); 1770 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype) 1771 stream_write_tree (ob, ttype, true); 1772 1773 /* Emit the table of action chains. 
*/ 1774 if (targetm.arm_eabi_unwinder) 1775 { 1776 tree t; 1777 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi)); 1778 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t) 1779 stream_write_tree (ob, t, true); 1780 } 1781 else 1782 { 1783 uchar c; 1784 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other)); 1785 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c) 1786 streamer_write_char_stream (ob->main_stream, c); 1787 } 1788 } 1789 1790 /* The LTO_null either terminates the record or indicates that there 1791 are no eh_records at all. */ 1792 streamer_write_record_start (ob, LTO_null); 1793} 1794 1795 1796/* Output all of the active ssa names to the ssa_names stream. */ 1797 1798static void 1799output_ssa_names (struct output_block *ob, struct function *fn) 1800{ 1801 unsigned int i, len; 1802 1803 len = vec_safe_length (SSANAMES (fn)); 1804 streamer_write_uhwi (ob, len); 1805 1806 for (i = 1; i < len; i++) 1807 { 1808 tree ptr = (*SSANAMES (fn))[i]; 1809 1810 if (ptr == NULL_TREE 1811 || SSA_NAME_IN_FREE_LIST (ptr) 1812 || virtual_operand_p (ptr)) 1813 continue; 1814 1815 streamer_write_uhwi (ob, i); 1816 streamer_write_char_stream (ob->main_stream, 1817 SSA_NAME_IS_DEFAULT_DEF (ptr)); 1818 if (SSA_NAME_VAR (ptr)) 1819 stream_write_tree (ob, SSA_NAME_VAR (ptr), true); 1820 else 1821 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */ 1822 stream_write_tree (ob, TREE_TYPE (ptr), true); 1823 } 1824 1825 streamer_write_zero (ob); 1826} 1827 1828 1829/* Output a wide-int. */ 1830 1831static void 1832streamer_write_wi (struct output_block *ob, 1833 const widest_int &w) 1834{ 1835 int len = w.get_len (); 1836 1837 streamer_write_uhwi (ob, w.get_precision ()); 1838 streamer_write_uhwi (ob, len); 1839 for (int i = 0; i < len; i++) 1840 streamer_write_hwi (ob, w.elt (i)); 1841} 1842 1843 1844/* Output the cfg. 
*/ 1845 1846static void 1847output_cfg (struct output_block *ob, struct function *fn) 1848{ 1849 struct lto_output_stream *tmp_stream = ob->main_stream; 1850 basic_block bb; 1851 1852 ob->main_stream = ob->cfg_stream; 1853 1854 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST, 1855 profile_status_for_fn (fn)); 1856 1857 /* Output the number of the highest basic block. */ 1858 streamer_write_uhwi (ob, last_basic_block_for_fn (fn)); 1859 1860 FOR_ALL_BB_FN (bb, fn) 1861 { 1862 edge_iterator ei; 1863 edge e; 1864 1865 streamer_write_hwi (ob, bb->index); 1866 1867 /* Output the successors and the edge flags. */ 1868 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs)); 1869 FOR_EACH_EDGE (e, ei, bb->succs) 1870 { 1871 streamer_write_uhwi (ob, e->dest->index); 1872 streamer_write_hwi (ob, e->probability); 1873 streamer_write_gcov_count (ob, e->count); 1874 streamer_write_uhwi (ob, e->flags); 1875 } 1876 } 1877 1878 streamer_write_hwi (ob, -1); 1879 1880 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun); 1881 while (bb->next_bb) 1882 { 1883 streamer_write_hwi (ob, bb->next_bb->index); 1884 bb = bb->next_bb; 1885 } 1886 1887 streamer_write_hwi (ob, -1); 1888 1889 /* ??? The cfgloop interface is tied to cfun. */ 1890 gcc_assert (cfun == fn); 1891 1892 /* Output the number of loops. */ 1893 streamer_write_uhwi (ob, number_of_loops (fn)); 1894 1895 /* Output each loop, skipping the tree root which has number zero. */ 1896 for (unsigned i = 1; i < number_of_loops (fn); ++i) 1897 { 1898 struct loop *loop = get_loop (fn, i); 1899 1900 /* Write the index of the loop header. That's enough to rebuild 1901 the loop tree on the reader side. Stream -1 for an unused 1902 loop entry. */ 1903 if (!loop) 1904 { 1905 streamer_write_hwi (ob, -1); 1906 continue; 1907 } 1908 else 1909 streamer_write_hwi (ob, loop->header->index); 1910 1911 /* Write everything copy_loop_info copies. 
*/ 1912 streamer_write_enum (ob->main_stream, 1913 loop_estimation, EST_LAST, loop->estimate_state); 1914 streamer_write_hwi (ob, loop->any_upper_bound); 1915 if (loop->any_upper_bound) 1916 streamer_write_wi (ob, loop->nb_iterations_upper_bound); 1917 streamer_write_hwi (ob, loop->any_estimate); 1918 if (loop->any_estimate) 1919 streamer_write_wi (ob, loop->nb_iterations_estimate); 1920 1921 /* Write OMP SIMD related info. */ 1922 streamer_write_hwi (ob, loop->safelen); 1923 streamer_write_hwi (ob, loop->dont_vectorize); 1924 streamer_write_hwi (ob, loop->force_vectorize); 1925 stream_write_tree (ob, loop->simduid, true); 1926 } 1927 1928 ob->main_stream = tmp_stream; 1929} 1930 1931 1932/* Create the header in the file using OB. If the section type is for 1933 a function, set FN to the decl for that function. */ 1934 1935void 1936produce_asm (struct output_block *ob, tree fn) 1937{ 1938 enum lto_section_type section_type = ob->section_type; 1939 struct lto_function_header header; 1940 char *section_name; 1941 1942 if (section_type == LTO_section_function_body) 1943 { 1944 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn)); 1945 section_name = lto_get_section_name (section_type, name, NULL); 1946 } 1947 else 1948 section_name = lto_get_section_name (section_type, NULL, NULL); 1949 1950 lto_begin_section (section_name, !flag_wpa); 1951 free (section_name); 1952 1953 /* The entire header is stream computed here. */ 1954 memset (&header, 0, sizeof (struct lto_function_header)); 1955 1956 /* Write the header. 
 */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Only function bodies carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}


/* Output the base body of struct function FN using output block OB.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.
 */
  /* The flags below are bit-packed; the reader must unpack them in
     exactly this order.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}


/* Output the body of function NODE->DECL.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.
 */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      /* 1 marks a function with a body following.  */
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.
 */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
		gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    /* 0 marks an abstract function without a body.  */
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}


/* Output the initializer of variable NODE->DECL.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable, which is the constructor
     being streamed.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this constructor.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}


/* Emit toplevel asms.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  /* Nothing to do if there are no toplevel asm statements.  */
  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.
 */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Stream each asm string together with its symbol-table order so the
     reader can re-insert it at the right position.  */
  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL string terminates the list of asm statements.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}


/* Copy the function body or variable constructor of NODE without
   deserializing: the already-pickled section from the input file is
   copied verbatim into the output, and the in-state decl streams are
   copied into the out-state so reference indices stay valid.  */

static void
copy_function_or_variable (struct symtab_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
                               name, &len);
  gcc_assert (data);

  /* Do a bit copy of the function body.  */
  lto_write_data (data, len);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = vec_safe_length (in_state->streams[i]);
      vec<tree, va_gc> *trees = in_state->streams[i];
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
         So just copy the vector.  All the encoders in the out state
         must be empty where we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
        encoder->trees.safe_push ((*trees)[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
                         data, len);
  lto_end_section ();
}

/* Wrap symbol references in *TP inside a type-preserving MEM_REF.
   walk_tree callback; WS is the walk-subtrees flag (cleared to stop
   recursing into a subtree we have handled or that cannot contain
   references).  */

static tree
wrap_refs (tree *tp, int *ws, void *)
{
  tree t = *tp;
  if (handled_component_p (t)
      && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
    {
      /* Replace the bare VAR_DECL base with MEM_REF (&decl, 0) of the
         same type, preserving volatility.  */
      tree decl = TREE_OPERAND (t, 0);
      tree ptrtype = build_pointer_type (TREE_TYPE (decl));
      TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
                                    build1 (ADDR_EXPR, ptrtype, decl),
                                    build_int_cst (ptrtype, 0));
      TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
      *ws = 0;
    }
  else if (TREE_CODE (t) == CONSTRUCTOR)
    ;
  else if (!EXPR_P (t))
    /* Non-expression, non-constructor nodes need no rewriting below.  */
    *ws = 0;
  return NULL_TREE;
}

/* Main entry point from the pass manager.
 */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  /* In checking builds, track DECL_UIDs already emitted to catch
     double-emission of a body or initializer.  */
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
        {
          if (lto_symtab_encoder_encode_body_p (encoder, node)
              && !node->alias)
            {
#ifdef ENABLE_CHECKING
              gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
              bitmap_set_bit (output, DECL_UID (node->decl));
#endif
              /* Each body gets its own decl state so per-function
                 reference indices are self-contained.  */
              decl_state = lto_new_out_decl_state ();
              lto_push_out_decl_state (decl_state);
              if (gimple_has_body_p (node->decl) || !flag_wpa
                  /* Thunks have no body but they may be synthesized
                     at WPA time.  */
                  || DECL_ARGUMENTS (node->decl))
                output_function (node);
              else
                /* At WPA, bodies we never materialized are bit-copied
                   from the input object file.  */
                copy_function_or_variable (node);
              gcc_assert (lto_get_out_decl_state () == decl_state);
              lto_pop_out_decl_state ();
              lto_record_function_out_decl_state (node->decl, decl_state);
            }
        }
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
        {
          /* Wrap symbol references inside the ctor in a type
             preserving MEM_REF.  */
          tree ctor = DECL_INITIAL (node->decl);
          if (ctor && !in_lto_p)
            walk_tree (&ctor, wrap_refs, NULL, NULL);
          if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
              && lto_symtab_encoder_encode_initializer_p (encoder, node)
              && !node->alias)
            {
              timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
              gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
              bitmap_set_bit (output, DECL_UID (node->decl));
#endif
              decl_state = lto_new_out_decl_state ();
              lto_push_out_decl_state (decl_state);
              if (DECL_INITIAL (node->decl) != error_mark_node
                  || !flag_wpa)
                output_constructor (node);
              else
                copy_function_or_variable (node);
              gcc_assert (lto_get_out_decl_state () == decl_state);
              lto_pop_out_decl_state ();
              lto_record_function_out_decl_state (node->decl, decl_state);
              timevar_pop (TV_IPA_LTO_CTORS_OUT);
            }
        }
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

  output_offload_tables ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}

/* Write each node encoded by ENCODER to OB, as well as those reachable
   from it and required for correct representation of its semantics.
   Each node in ENCODER must be a global declaration or a type.  A node
   is written only once, even if it appears multiple times in the
   vector.  Certain transitively-reachable nodes, such as those
   representing expressions, may be duplicated, but such nodes
   must not appear in ENCODER itself.
 */

static void
write_global_stream (struct output_block *ob,
                     struct lto_tree_ref_encoder *encoder)
{
  tree t;
  size_t index;
  const size_t size = lto_tree_ref_encoder_size (encoder);

  for (index = 0; index < size; index++)
    {
      t = lto_tree_ref_encoder_get_tree (encoder, index);
      /* Only stream trees not yet in the writer cache; a cache hit means
         the node was already written earlier in this output block.  */
      if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
        stream_write_tree (ob, t, false);
    }
}


/* Write a sequence of indices into the globals vector corresponding
   to the trees in ENCODER.  These are used by the reader to map the
   indices used to refer to global entities within function bodies to
   their referents.  */

static void
write_global_references (struct output_block *ob,
                         struct lto_tree_ref_encoder *encoder)
{
  tree t;
  uint32_t index;
  const uint32_t size = lto_tree_ref_encoder_size (encoder);

  /* Write size and slot indexes as 32-bit unsigned numbers.  */
  uint32_t *data = XNEWVEC (uint32_t, size + 1);
  data[0] = size;

  for (index = 0; index < size; index++)
    {
      uint32_t slot_num;

      t = lto_tree_ref_encoder_get_tree (encoder, index);
      /* Every tree in the encoder must already be in the writer cache
         (written by write_global_stream).  */
      streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
      gcc_assert (slot_num != (unsigned)-1);
      data[index + 1] = slot_num;
    }

  lto_write_data (data, sizeof (int32_t) * (size + 1));
  free (data);
}


/* Write all the streams in an lto_out_decl_state STATE using
   output block OB and output stream OUT_STREAM.  */

void
lto_output_decl_state_streams (struct output_block *ob,
                               struct lto_out_decl_state *state)
{
  int i;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    write_global_stream (ob, &state->streams[i]);
}


/* Write all the references in an lto_out_decl_state STATE using
   output block OB and output stream OUT_STREAM.  */

void
lto_output_decl_state_refs (struct output_block *ob,
                            struct lto_out_decl_state *state)
{
  unsigned i;
  uint32_t ref;
  tree decl;

  /* Write reference to FUNCTION_DECL.  If there is no function,
     write reference to void_type_node.  */
  decl = (state->fn_decl) ? state->fn_decl : void_type_node;
  streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
  gcc_assert (ref != (unsigned)-1);
  lto_write_data (&ref, sizeof (uint32_t));

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    write_global_references (ob, &state->streams[i]);
}


/* Return the written size of STATE.  Must mirror exactly the bytes
   emitted by lto_output_decl_state_refs.  */

static size_t
lto_out_decl_state_written_size (struct lto_out_decl_state *state)
{
  int i;
  size_t size;

  size = sizeof (int32_t);	/* fn_ref.  */
  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size += sizeof (int32_t);	/* vector size.  */
      size += (lto_tree_ref_encoder_size (&state->streams[i])
               * sizeof (int32_t));
    }
  return size;
}


/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far; ALIAS is true when T is being written on behalf of an alias.
   The record layout consumed by the linker plugin is:
   NUL-terminated name, NUL-terminated comdat group, kind byte,
   visibility byte, 8-byte size, 4-byte slot number.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
              tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.
 */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT_P (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Skip names already written; seen->add returns true on duplicates.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
        kind = GCCPK_WEAKUNDEF;
      else
        kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
        kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
        kind = GCCPK_COMMON;
      else
        kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
                  || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
                  || (cgraph_node::get (t)
                      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external does.
     When a symbol is external, we need to output it with DEFAULT
     visibility when compiling with -fvisibility=default, while with
     HIDDEN visibility when the symbol has attribute
     (visibility("hidden")) specified.  targetm.binds_local_p checks
     DECL_VISIBILITY_SPECIFIED and gets this right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
        visibility = GCCPV_DEFAULT;
        break;
      case VISIBILITY_PROTECTED:
        visibility = GCCPV_PROTECTED;
        break;
      case VISIBILITY_HIDDEN:
        visibility = GCCPV_HIDDEN;
        break;
      case VISIBILITY_INTERNAL:
        visibility = GCCPV_INTERNAL;
        break;
      }

  /* Only common symbols carry a meaningful size for the plugin.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}

/* Return true if NODE should appear in the plugin symbol table.  */

bool
output_symbol_p (symtab_node *node)
{
  struct cgraph_node *cnode;
  if (!node->real_symbol_p ())
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  */
  cnode = dyn_cast <cgraph_node *> (node);
  if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
      && cnode->callers)
    return true;

  /* Ignore all references from external vars initializers - they are not really
     part of the compilation unit until they are used by folding.  Some symbols,
     like references to external construction vtables cannot be referred to at
     all.  We decide this at can_refer_decl_in_current_unit_p.  */
  if (!node->definition || DECL_EXTERNAL (node->decl))
    {
      int i;
      struct ipa_ref *ref;
      for (i = 0; node->iterate_referring (i, ref); i++)
        {
          if (ref->use == IPA_REF_ALIAS)
            continue;
          if (is_a <cgraph_node *> (ref->referring))
            return true;
          if (!DECL_EXTERNAL (ref->referring->decl))
            return true;
        }
      return false;
    }
  return true;
}


/* Write an IL symbol table to OB.
   SET and VSET are cgraph/varpool node sets we are outputting.  */

static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  hash_set<const char *> seen;

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
        continue;
      write_symbol (cache, node->decl, &seen, false);
    }
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
        continue;
      write_symbol (cache, node->decl, &seen, false);
    }

  lto_end_section ();
}


/* Init the streamer_mode_table for output, where we collect info on what
   machine_mode values have been streamed.
 */
void
lto_output_init_mode_table (void)
{
  memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
}


/* Write the mode table: for every machine_mode marked in
   streamer_mode_table, stream enough properties that the reader can map
   it onto a host mode.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != VOIDmode we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
        machine_mode m = (machine_mode) i;
        if (GET_MODE_INNER (m) != VOIDmode)
          streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == VOIDmode,
     so that we can refer to them afterwards.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
        {
          machine_mode m = (machine_mode) i;
          if ((GET_MODE_INNER (m) == VOIDmode) ^ (pass == 0))
            continue;
          bp_pack_value (&bp, m, 8);
          bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
          bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
          bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
          bp_pack_value (&bp, GET_MODE_INNER (m), 8);
          bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
          switch (GET_MODE_CLASS (m))
            {
            case MODE_FRACT:
            case MODE_UFRACT:
            case MODE_ACCUM:
            case MODE_UACCUM:
              bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
              bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
              break;
            case MODE_FLOAT:
            case MODE_DECIMAL_FLOAT:
              bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
              break;
            default:
              break;
            }
          bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
        }
  /* VOIDmode terminates the mode records.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}


/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
        delete out_state->streams[i].tree_hash_table;
        out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
        lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
        lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine
     dependencies and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
        lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}