/* SSA-PRE for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */

/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
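/* Illustrative sketch, not part of the original sources (block and SSA
   name numbers below are made up).  On the diamond

       bb2: if (c_1) goto bb3; else goto bb4;
       bb3: x_2 = a_5 + b_6;
       bb4: <nothing>
       bb5: y_3 = a_5 + b_6;

   the value of a_5 + b_6 is in AVAIL_OUT (bb3) but not AVAIL_OUT (bb4),
   and it is ANTIC in both predecessors of bb5, so it is partially
   redundant in bb5.  Insertion adds pretmp_7 = a_5 + b_6 to bb4 and a
   PHI <x_2(bb3), pretmp_7(bb4)> in bb5; elimination then replaces the
   computation of y_3 by that PHI result.  */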
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */

/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
  NAME,
  NARY,
  REFERENCE,
  CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d value_type;
  typedef pre_expr_d compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const value_type *e1, const compare_type *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const value_type *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}

/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)				\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit


/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
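/* Illustrative sketch, not part of the original sources: for a block
   whose only statement is

       x_1 = a_2 + b_3;

   the local sets pair up as EXP_GEN = { a_2 + b_3 } (an expression we
   could re-generate here) and TMP_GEN = { x_1 } (the temporary it
   defines); x_1's value also becomes part of AVAIL_OUT of this block
   and, values never being killed in SSA, stays available from its
   definition point onwards.  */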
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove<expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d value_type;
  typedef expr_pred_trans_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const value_type *ve1,
			  const compare_type *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}


/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    {
      value_expressions.safe_grow_cleared (v + 1);
    }

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
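/* Illustrative sketch, not part of the original sources: if a_2, b_4
   and the NARY b_4 + 0 all have value-id 7, then value_expressions[7]
   is a bitmap over the expression-ids of those three pre_exprs.  A
   bitmap_set containing any of them has bit 7 set in ->values plus the
   corresponding bits in ->expressions; the leader lookups below iterate
   this reverse mapping instead of the whole set.  */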
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}

/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}


/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap exprset = value_expressions[i];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    result.quick_push (expression_for_id (j));
	}
    }

  return result;
}

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.
     For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = value_expressions[lookfor];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
	{
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }

  gcc_unreachable ();
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}

/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     ref->operands.iterate (i, &vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != ref->operands.length () - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}


DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[v];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of them
	 have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  unsigned int cnt;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
					   NULL, NULL);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
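/* Illustrative sketch, not part of the original sources: for a load

       bb5: .MEM_6 = PHI <.MEM_3(bb3), .MEM_4(bb4)>
	    x_7 = *p_1;   (VUSE .MEM_6)

   translating the VUSE to predecessor bb4 follows the virtual PHI and
   yields .MEM_4.  If no VUSE dominating the PHI can be proven to leave
   the reference unclobbered, *SAME_VALID is set to false so the
   translated expression gets a new value-id instead of sharing the
   original one.  */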
*/ 1373 1374static inline pre_expr 1375find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2) 1376{ 1377 pre_expr result; 1378 1379 result = bitmap_find_leader (set1, val); 1380 if (!result && set2) 1381 result = bitmap_find_leader (set2, val); 1382 return result; 1383} 1384 1385/* Get the tree type for our PRE expression e. */ 1386 1387static tree 1388get_expr_type (const pre_expr e) 1389{ 1390 switch (e->kind) 1391 { 1392 case NAME: 1393 return TREE_TYPE (PRE_EXPR_NAME (e)); 1394 case CONSTANT: 1395 return TREE_TYPE (PRE_EXPR_CONSTANT (e)); 1396 case REFERENCE: 1397 return PRE_EXPR_REFERENCE (e)->type; 1398 case NARY: 1399 return PRE_EXPR_NARY (e)->type; 1400 } 1401 gcc_unreachable (); 1402} 1403 1404/* Get a representative SSA_NAME for a given expression. 1405 Since all of our sub-expressions are treated as values, we require 1406 them to be SSA_NAME's for simplicity. 1407 Prior versions of GVNPRE used to use "value handles" here, so that 1408 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In 1409 either case, the operands are really values (IE we do not expect 1410 them to be usable without finding leaders). */ 1411 1412static tree 1413get_representative_for (const pre_expr e) 1414{ 1415 tree name; 1416 unsigned int value_id = get_expr_value_id (e); 1417 1418 switch (e->kind) 1419 { 1420 case NAME: 1421 return PRE_EXPR_NAME (e); 1422 case CONSTANT: 1423 return PRE_EXPR_CONSTANT (e); 1424 case NARY: 1425 case REFERENCE: 1426 { 1427 /* Go through all of the expressions representing this value 1428 and pick out an SSA_NAME. */ 1429 unsigned int i; 1430 bitmap_iterator bi; 1431 bitmap exprs = value_expressions[value_id]; 1432 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi) 1433 { 1434 pre_expr rep = expression_for_id (i); 1435 if (rep->kind == NAME) 1436 return PRE_EXPR_NAME (rep); 1437 else if (rep->kind == CONSTANT) 1438 return PRE_EXPR_CONSTANT (rep); 1439 } 1440 } 1441 break; 1442 } 1443 1444 /* If we reached here we couldn't find an SSA_NAME. This can 1445 happen when we've discovered a value that has never appeared in 1446 the program as set to an SSA_NAME, as the result of phi translation. 1447 Create one here. 1448 ??? We should be able to re-use this when we insert the statement 1449 to compute it. */ 1450 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp"); 1451 VN_INFO_GET (name)->value_id = value_id; 1452 VN_INFO (name)->valnum = name; 1453 /* ??? For now mark this SSA name for release by SCCVN. */ 1454 VN_INFO (name)->needs_insertion = true; 1455 add_to_value (value_id, get_or_alloc_expr_for_name (name)); 1456 if (dump_file && (dump_flags & TDF_DETAILS)) 1457 { 1458 fprintf (dump_file, "Created SSA_NAME representative "); 1459 print_generic_expr (dump_file, name, 0); 1460 fprintf (dump_file, " for expression:"); 1461 print_pre_expr (dump_file, e); 1462 fprintf (dump_file, " (%04d)\n", value_id); 1463 } 1464 1465 return name; 1466} 1467 1468 1469 1470static pre_expr 1471phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, 1472 basic_block pred, basic_block phiblock); 1473 1474/* Translate EXPR using phis in PHIBLOCK, so that it has the values of 1475 the phis in PRED. Return NULL if we can't find a leader for each part 1476 of the translated expression. 
*/ 1477 1478static pre_expr 1479phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, 1480 basic_block pred, basic_block phiblock) 1481{ 1482 switch (expr->kind) 1483 { 1484 case NARY: 1485 { 1486 unsigned int i; 1487 bool changed = false; 1488 vn_nary_op_t nary = PRE_EXPR_NARY (expr); 1489 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s, 1490 sizeof_vn_nary_op (nary->length)); 1491 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length)); 1492 1493 for (i = 0; i < newnary->length; i++) 1494 { 1495 if (TREE_CODE (newnary->op[i]) != SSA_NAME) 1496 continue; 1497 else 1498 { 1499 pre_expr leader, result; 1500 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id; 1501 leader = find_leader_in_sets (op_val_id, set1, set2); 1502 result = phi_translate (leader, set1, set2, pred, phiblock); 1503 if (result && result != leader) 1504 { 1505 tree name = get_representative_for (result); 1506 if (!name) 1507 return NULL; 1508 newnary->op[i] = name; 1509 } 1510 else if (!result) 1511 return NULL; 1512 1513 changed |= newnary->op[i] != nary->op[i]; 1514 } 1515 } 1516 if (changed) 1517 { 1518 pre_expr constant; 1519 unsigned int new_val_id; 1520 1521 tree result = vn_nary_op_lookup_pieces (newnary->length, 1522 newnary->opcode, 1523 newnary->type, 1524 &newnary->op[0], 1525 &nary); 1526 if (result && is_gimple_min_invariant (result)) 1527 return get_or_alloc_expr_for_constant (result); 1528 1529 expr = (pre_expr) pool_alloc (pre_expr_pool); 1530 expr->kind = NARY; 1531 expr->id = 0; 1532 if (nary) 1533 { 1534 PRE_EXPR_NARY (expr) = nary; 1535 constant = fully_constant_expression (expr); 1536 if (constant != expr) 1537 return constant; 1538 1539 new_val_id = nary->value_id; 1540 get_or_alloc_expression_id (expr); 1541 } 1542 else 1543 { 1544 new_val_id = get_next_value_id (); 1545 value_expressions.safe_grow_cleared (get_max_value_id () + 1); 1546 nary = vn_nary_op_insert_pieces (newnary->length, 1547 newnary->opcode, 1548 newnary->type, 1549 &newnary->op[0], 1550 result, new_val_id); 1551 PRE_EXPR_NARY (expr) = nary; 1552 constant = fully_constant_expression (expr); 1553 if (constant != expr) 1554 return constant; 1555 get_or_alloc_expression_id (expr); 1556 } 1557 add_to_value (new_val_id, expr); 1558 } 1559 return expr; 1560 } 1561 break; 1562 1563 case REFERENCE: 1564 { 1565 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); 1566 vec<vn_reference_op_s> operands = ref->operands; 1567 tree vuse = ref->vuse; 1568 tree newvuse = vuse; 1569 vec<vn_reference_op_s> newoperands = vNULL; 1570 bool changed = false, same_valid = true; 1571 unsigned int i, n; 1572 vn_reference_op_t operand; 1573 vn_reference_t newref; 1574 1575 for (i = 0; operands.iterate (i, &operand); i++) 1576 { 1577 pre_expr opresult; 1578 pre_expr leader; 1579 tree op[3]; 1580 tree type = operand->type; 1581 vn_reference_op_s newop = *operand; 1582 op[0] = operand->op0; 1583 op[1] = operand->op1; 1584 op[2] = operand->op2; 1585 for (n = 0; n < 3; ++n) 1586 { 1587 unsigned int op_val_id; 1588 if (!op[n]) 1589 continue; 1590 if (TREE_CODE (op[n]) != SSA_NAME) 1591 { 1592 /* We can't possibly insert these. 
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		if (!leader)
		  break;
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (!opresult)
		  break;
		if (opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    changed |= name != op[n];
		    op[n] = name;
		  }
	      }
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    if (!changed)
	      continue;
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands.exists ()
						    ? newoperands : operands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partially
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    value_expressions.safe_grow_cleared
		      (get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = vNULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  {
	    edge e = find_edge (pred, gimple_bb (def_stmt));
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    return get_or_alloc_expr_for_name (def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PRED's AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      if (phi_trans_add (&slot, expr, pred))
	return slot->v;
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = NULL;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  if (slot)
    {
      if (phitrans)
	slot->v = phitrans;
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
    }

  return phitrans;
}


/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
		   basic_block phiblock)
{
  vec<pre_expr> exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
	continue;

      /* We might end up with multiple expressions from SET being
	 translated to the same value.  In this case we do not want
	 to retain the NARY or REFERENCE expression but prefer a NAME
	 which would be the leader.  */
      if (translated->kind == NAME)
	bitmap_value_replace_in_set (dest, translated);
      else
	bitmap_value_insert_into_set (dest, translated);
    }
  exprs.release ();
}

/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return expr;
	}
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (i);
    }
  return NULL;
}
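/* Illustrative sketch, not part of the original sources: if value-id 9
   is represented by the expressions { a_2, b_3 + c_4 } and SET contains
   both, bitmap_find_leader (SET, 9) intersects value_expressions[9]
   with SET->expressions and returns the first common member, e.g. the
   NAME a_2, which can then stand in for the value directly.  */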
*/ 1897 1898static bool 1899value_dies_in_block_x (pre_expr expr, basic_block block) 1900{ 1901 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse; 1902 vn_reference_t refx = PRE_EXPR_REFERENCE (expr); 1903 gimple def; 1904 gimple_stmt_iterator gsi; 1905 unsigned id = get_expression_id (expr); 1906 bool res = false; 1907 ao_ref ref; 1908 1909 if (!vuse) 1910 return false; 1911 1912 /* Lookup a previously calculated result. */ 1913 if (EXPR_DIES (block) 1914 && bitmap_bit_p (EXPR_DIES (block), id * 2)) 1915 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1); 1916 1917 /* A memory expression {e, VUSE} dies in the block if there is a 1918 statement that may clobber e. If, starting statement walk from the 1919 top of the basic block, a statement uses VUSE there can be no kill 1920 inbetween that use and the original statement that loaded {e, VUSE}, 1921 so we can stop walking. */ 1922 ref.base = NULL_TREE; 1923 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi)) 1924 { 1925 tree def_vuse, def_vdef; 1926 def = gsi_stmt (gsi); 1927 def_vuse = gimple_vuse (def); 1928 def_vdef = gimple_vdef (def); 1929 1930 /* Not a memory statement. */ 1931 if (!def_vuse) 1932 continue; 1933 1934 /* Not a may-def. */ 1935 if (!def_vdef) 1936 { 1937 /* A load with the same VUSE, we're done. */ 1938 if (def_vuse == vuse) 1939 break; 1940 1941 continue; 1942 } 1943 1944 /* Init ref only if we really need it. */ 1945 if (ref.base == NULL_TREE 1946 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type, 1947 refx->operands)) 1948 { 1949 res = true; 1950 break; 1951 } 1952 /* If the statement may clobber expr, it dies. */ 1953 if (stmt_may_clobber_ref_p_1 (def, &ref)) 1954 { 1955 res = true; 1956 break; 1957 } 1958 } 1959 1960 /* Remember the result. */ 1961 if (!EXPR_DIES (block)) 1962 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack); 1963 bitmap_set_bit (EXPR_DIES (block), id * 2); 1964 if (res) 1965 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1); 1966 1967 return res; 1968} 1969 1970 1971/* Determine if OP is valid in SET1 U SET2, which it is when the union 1972 contains its value-id. */ 1973 1974static bool 1975op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op) 1976{ 1977 if (op && TREE_CODE (op) == SSA_NAME) 1978 { 1979 unsigned int value_id = VN_INFO (op)->value_id; 1980 if (!(bitmap_set_contains_value (set1, value_id) 1981 || (set2 && bitmap_set_contains_value (set2, value_id)))) 1982 return false; 1983 } 1984 return true; 1985} 1986 1987/* Determine if the expression EXPR is valid in SET1 U SET2. 1988 ONLY SET2 CAN BE NULL. 1989 This means that we have a leader for each part of the expression 1990 (if it consists of values), or the expression is an SSA_NAME. 1991 For loads/calls, we also see if the vuse is killed in this block. */ 1992 1993static bool 1994valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr) 1995{ 1996 switch (expr->kind) 1997 { 1998 case NAME: 1999 /* By construction all NAMEs are available. Non-available 2000 NAMEs are removed by subtracting TMP_GEN from the sets. 
*/ 2001 return true; 2002 case NARY: 2003 { 2004 unsigned int i; 2005 vn_nary_op_t nary = PRE_EXPR_NARY (expr); 2006 for (i = 0; i < nary->length; i++) 2007 if (!op_valid_in_sets (set1, set2, nary->op[i])) 2008 return false; 2009 return true; 2010 } 2011 break; 2012 case REFERENCE: 2013 { 2014 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); 2015 vn_reference_op_t vro; 2016 unsigned int i; 2017 2018 FOR_EACH_VEC_ELT (ref->operands, i, vro) 2019 { 2020 if (!op_valid_in_sets (set1, set2, vro->op0) 2021 || !op_valid_in_sets (set1, set2, vro->op1) 2022 || !op_valid_in_sets (set1, set2, vro->op2)) 2023 return false; 2024 } 2025 return true; 2026 } 2027 default: 2028 gcc_unreachable (); 2029 } 2030} 2031 2032/* Clean the set of expressions that are no longer valid in SET1 or 2033 SET2. This means expressions that are made up of values we have no 2034 leaders for in SET1 or SET2. This version is used for partial 2035 anticipation, which means it is not valid in either ANTIC_IN or 2036 PA_IN. */ 2037 2038static void 2039dependent_clean (bitmap_set_t set1, bitmap_set_t set2) 2040{ 2041 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1); 2042 pre_expr expr; 2043 int i; 2044 2045 FOR_EACH_VEC_ELT (exprs, i, expr) 2046 { 2047 if (!valid_in_sets (set1, set2, expr)) 2048 bitmap_remove_from_set (set1, expr); 2049 } 2050 exprs.release (); 2051} 2052 2053/* Clean the set of expressions that are no longer valid in SET. This 2054 means expressions that are made up of values we have no leaders for 2055 in SET. */ 2056 2057static void 2058clean (bitmap_set_t set) 2059{ 2060 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set); 2061 pre_expr expr; 2062 int i; 2063 2064 FOR_EACH_VEC_ELT (exprs, i, expr) 2065 { 2066 if (!valid_in_sets (set, NULL, expr)) 2067 bitmap_remove_from_set (set, expr); 2068 } 2069 exprs.release (); 2070} 2071 2072/* Clean the set of expressions that are no longer valid in SET because 2073 they are clobbered in BLOCK or because they trap and may not be executed. */ 2074 2075static void 2076prune_clobbered_mems (bitmap_set_t set, basic_block block) 2077{ 2078 bitmap_iterator bi; 2079 unsigned i; 2080 2081 FOR_EACH_EXPR_ID_IN_SET (set, i, bi) 2082 { 2083 pre_expr expr = expression_for_id (i); 2084 if (expr->kind == REFERENCE) 2085 { 2086 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); 2087 if (ref->vuse) 2088 { 2089 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse); 2090 if (!gimple_nop_p (def_stmt) 2091 && ((gimple_bb (def_stmt) != block 2092 && !dominated_by_p (CDI_DOMINATORS, 2093 block, gimple_bb (def_stmt))) 2094 || (gimple_bb (def_stmt) == block 2095 && value_dies_in_block_x (expr, block)))) 2096 bitmap_remove_from_set (set, expr); 2097 } 2098 } 2099 else if (expr->kind == NARY) 2100 { 2101 vn_nary_op_t nary = PRE_EXPR_NARY (expr); 2102 /* If the NARY may trap make sure the block does not contain 2103 a possible exit point. 2104 ??? This is overly conservative if we translate AVAIL_OUT 2105 as the available expression might be after the exit point. */ 2106 if (BB_MAY_NOTRETURN (block) 2107 && vn_nary_may_trap (nary)) 2108 bitmap_remove_from_set (set, expr); 2109 } 2110 } 2111} 2112 2113static sbitmap has_abnormal_preds; 2114 2115/* Compute the ANTIC set for BLOCK. 
2116 
2117    If succs(BLOCK) > 1 then
2118       ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2119    else if succs(BLOCK) == 1 then
2120       ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2121 
2122    ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2123 */
2124 
2125 static bool
2126 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2127 {
2128   bool changed = false;
2129   bitmap_set_t S, old, ANTIC_OUT;
2130   bitmap_iterator bi;
2131   unsigned int bii;
2132   edge e;
2133   edge_iterator ei;
2134   bool was_visited = BB_VISITED (block);
2135 
2136   old = ANTIC_OUT = S = NULL;
2137   BB_VISITED (block) = 1;
2138 
2139   /* If any edges from predecessors are abnormal, antic_in is empty,
2140      so do nothing.  */
2141   if (block_has_abnormal_pred_edge)
2142     goto maybe_dump_sets;
2143 
2144   old = ANTIC_IN (block);
2145   ANTIC_OUT = bitmap_set_new ();
2146 
2147   /* If the block has no successors, ANTIC_OUT is empty.  */
2148   if (EDGE_COUNT (block->succs) == 0)
2149     ;
2150   /* If we have one successor, we could have some phi nodes to
2151      translate through.  */
2152   else if (single_succ_p (block))
2153     {
2154       basic_block succ_bb = single_succ (block);
2155       gcc_assert (BB_VISITED (succ_bb));
2156       phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2157     }
2158   /* If we have multiple successors, we take the intersection of all of
2159      them.  Note that in the case of loop exit phi nodes, we may have
2160      phis to translate through.  */
2161   else
2162     {
2163       size_t i;
2164       basic_block bprime, first = NULL;
2165 
2166       auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2167       FOR_EACH_EDGE (e, ei, block->succs)
2168 	{
2169 	  if (!first
2170 	      && BB_VISITED (e->dest))
2171 	    first = e->dest;
2172 	  else if (BB_VISITED (e->dest))
2173 	    worklist.quick_push (e->dest);
2174 	  else
2175 	    {
2176 	      /* Unvisited successors get their ANTIC_IN replaced by the
2177 		 maximal set to arrive at a maximum ANTIC_IN solution.
2178 		 We can ignore them in the intersection operation and thus
2179 		 need not explicitly represent that maximum solution.  */
2180 	      if (dump_file && (dump_flags & TDF_DETAILS))
2181 		fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2182 			 e->src->index, e->dest->index);
2183 	    }
2184 	}
2185 
2186       /* Of the multiple successors we must already have visited one,
2187 	 which is guaranteed by the iteration order.  */
2188       gcc_assert (first != NULL);
2189 
2190       phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2191 
2192       FOR_EACH_VEC_ELT (worklist, i, bprime)
2193 	{
2194 	  if (!gimple_seq_empty_p (phi_nodes (bprime)))
2195 	    {
2196 	      bitmap_set_t tmp = bitmap_set_new ();
2197 	      phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2198 	      bitmap_set_and (ANTIC_OUT, tmp);
2199 	      bitmap_set_free (tmp);
2200 	    }
2201 	  else
2202 	    bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2203 	}
2204     }
2205 
2206   /* Prune expressions that are clobbered in block and thus become
2207      invalid if translated from ANTIC_OUT to ANTIC_IN.  */
2208   prune_clobbered_mems (ANTIC_OUT, block);
2209 
2210   /* Generate ANTIC_OUT - TMP_GEN.  */
2211   S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2212 
2213   /* Start ANTIC_IN with EXP_GEN - TMP_GEN.
*/ 2214 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block), 2215 TMP_GEN (block)); 2216 2217 /* Then union in the ANTIC_OUT - TMP_GEN values, 2218 to get ANTIC_OUT U EXP_GEN - TMP_GEN */ 2219 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi) 2220 bitmap_value_insert_into_set (ANTIC_IN (block), 2221 expression_for_id (bii)); 2222 2223 clean (ANTIC_IN (block)); 2224 2225 if (!was_visited || !bitmap_set_equal (old, ANTIC_IN (block))) 2226 changed = true; 2227 2228 maybe_dump_sets: 2229 if (dump_file && (dump_flags & TDF_DETAILS)) 2230 { 2231 if (ANTIC_OUT) 2232 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index); 2233 2234 if (changed) 2235 fprintf (dump_file, "[changed] "); 2236 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN", 2237 block->index); 2238 2239 if (S) 2240 print_bitmap_set (dump_file, S, "S", block->index); 2241 } 2242 if (old) 2243 bitmap_set_free (old); 2244 if (S) 2245 bitmap_set_free (S); 2246 if (ANTIC_OUT) 2247 bitmap_set_free (ANTIC_OUT); 2248 return changed; 2249} 2250 2251/* Compute PARTIAL_ANTIC for BLOCK. 2252 2253 If succs(BLOCK) > 1 then 2254 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not 2255 in ANTIC_OUT for all succ(BLOCK) 2256 else if succs(BLOCK) == 1 then 2257 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)]) 2258 2259 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] 2260 - ANTIC_IN[BLOCK]) 2261 2262*/ 2263static bool 2264compute_partial_antic_aux (basic_block block, 2265 bool block_has_abnormal_pred_edge) 2266{ 2267 bool changed = false; 2268 bitmap_set_t old_PA_IN; 2269 bitmap_set_t PA_OUT; 2270 edge e; 2271 edge_iterator ei; 2272 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH); 2273 2274 old_PA_IN = PA_OUT = NULL; 2275 2276 /* If any edges from predecessors are abnormal, antic_in is empty, 2277 so do nothing. */ 2278 if (block_has_abnormal_pred_edge) 2279 goto maybe_dump_sets; 2280 2281 /* If there are too many partially anticipatable values in the 2282 block, phi_translate_set can take an exponential time: stop 2283 before the translation starts. */ 2284 if (max_pa 2285 && single_succ_p (block) 2286 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa) 2287 goto maybe_dump_sets; 2288 2289 old_PA_IN = PA_IN (block); 2290 PA_OUT = bitmap_set_new (); 2291 2292 /* If the block has no successors, ANTIC_OUT is empty. */ 2293 if (EDGE_COUNT (block->succs) == 0) 2294 ; 2295 /* If we have one successor, we could have some phi nodes to 2296 translate through. Note that we can't phi translate across DFS 2297 back edges in partial antic, because it uses a union operation on 2298 the successors. For recurrences like IV's, we will end up 2299 generating a new value in the set on each go around (i + 3 (VH.1) 2300 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */ 2301 else if (single_succ_p (block)) 2302 { 2303 basic_block succ = single_succ (block); 2304 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK)) 2305 phi_translate_set (PA_OUT, PA_IN (succ), block, succ); 2306 } 2307 /* If we have multiple successors, we take the union of all of 2308 them. 
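     Unlike full ANTIC, which intersects, a value needs to be computable
     along just one successor path to stay partially anticipated.  E.g.
     (illustration) if successor b3 computes a_1 + b_2 and successor b4
     computes nothing, a_1 + b_2 is still a candidate in PA_OUT here.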
*/ 2309 else 2310 { 2311 size_t i; 2312 basic_block bprime; 2313 2314 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs)); 2315 FOR_EACH_EDGE (e, ei, block->succs) 2316 { 2317 if (e->flags & EDGE_DFS_BACK) 2318 continue; 2319 worklist.quick_push (e->dest); 2320 } 2321 if (worklist.length () > 0) 2322 { 2323 FOR_EACH_VEC_ELT (worklist, i, bprime) 2324 { 2325 unsigned int i; 2326 bitmap_iterator bi; 2327 2328 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi) 2329 bitmap_value_insert_into_set (PA_OUT, 2330 expression_for_id (i)); 2331 if (!gimple_seq_empty_p (phi_nodes (bprime))) 2332 { 2333 bitmap_set_t pa_in = bitmap_set_new (); 2334 phi_translate_set (pa_in, PA_IN (bprime), block, bprime); 2335 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi) 2336 bitmap_value_insert_into_set (PA_OUT, 2337 expression_for_id (i)); 2338 bitmap_set_free (pa_in); 2339 } 2340 else 2341 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi) 2342 bitmap_value_insert_into_set (PA_OUT, 2343 expression_for_id (i)); 2344 } 2345 } 2346 } 2347 2348 /* Prune expressions that are clobbered in block and thus become 2349 invalid if translated from PA_OUT to PA_IN. */ 2350 prune_clobbered_mems (PA_OUT, block); 2351 2352 /* PA_IN starts with PA_OUT - TMP_GEN. 2353 Then we subtract things from ANTIC_IN. */ 2354 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block)); 2355 2356 /* For partial antic, we want to put back in the phi results, since 2357 we will properly avoid making them partially antic over backedges. */ 2358 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values); 2359 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions); 2360 2361 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */ 2362 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block)); 2363 2364 dependent_clean (PA_IN (block), ANTIC_IN (block)); 2365 2366 if (!bitmap_set_equal (old_PA_IN, PA_IN (block))) 2367 changed = true; 2368 2369 maybe_dump_sets: 2370 if (dump_file && (dump_flags & TDF_DETAILS)) 2371 { 2372 if (PA_OUT) 2373 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index); 2374 2375 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index); 2376 } 2377 if (old_PA_IN) 2378 bitmap_set_free (old_PA_IN); 2379 if (PA_OUT) 2380 bitmap_set_free (PA_OUT); 2381 return changed; 2382} 2383 2384/* Compute ANTIC and partial ANTIC sets. */ 2385 2386static void 2387compute_antic (void) 2388{ 2389 bool changed = true; 2390 int num_iterations = 0; 2391 basic_block block; 2392 int i; 2393 edge_iterator ei; 2394 edge e; 2395 2396 /* If any predecessor edges are abnormal, we punt, so antic_in is empty. 2397 We pre-build the map of blocks with incoming abnormal edges here. */ 2398 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun)); 2399 bitmap_clear (has_abnormal_preds); 2400 2401 FOR_ALL_BB_FN (block, cfun) 2402 { 2403 BB_VISITED (block) = 0; 2404 2405 FOR_EACH_EDGE (e, ei, block->preds) 2406 if (e->flags & EDGE_ABNORMAL) 2407 { 2408 bitmap_set_bit (has_abnormal_preds, block->index); 2409 2410 /* We also anticipate nothing. */ 2411 BB_VISITED (block) = 1; 2412 break; 2413 } 2414 2415 /* While we are here, give empty ANTIC_IN sets to each block. */ 2416 ANTIC_IN (block) = bitmap_set_new (); 2417 PA_IN (block) = bitmap_set_new (); 2418 } 2419 2420 /* At the exit block we anticipate nothing. 
*/ 2421 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1; 2422 2423 sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1); 2424 bitmap_ones (worklist); 2425 while (changed) 2426 { 2427 if (dump_file && (dump_flags & TDF_DETAILS)) 2428 fprintf (dump_file, "Starting iteration %d\n", num_iterations); 2429 /* ??? We need to clear our PHI translation cache here as the 2430 ANTIC sets shrink and we restrict valid translations to 2431 those having operands with leaders in ANTIC. Same below 2432 for PA ANTIC computation. */ 2433 num_iterations++; 2434 changed = false; 2435 for (i = postorder_num - 1; i >= 0; i--) 2436 { 2437 if (bitmap_bit_p (worklist, postorder[i])) 2438 { 2439 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]); 2440 bitmap_clear_bit (worklist, block->index); 2441 if (compute_antic_aux (block, 2442 bitmap_bit_p (has_abnormal_preds, 2443 block->index))) 2444 { 2445 FOR_EACH_EDGE (e, ei, block->preds) 2446 bitmap_set_bit (worklist, e->src->index); 2447 changed = true; 2448 } 2449 } 2450 } 2451 /* Theoretically possible, but *highly* unlikely. */ 2452 gcc_checking_assert (num_iterations < 500); 2453 } 2454 2455 statistics_histogram_event (cfun, "compute_antic iterations", 2456 num_iterations); 2457 2458 if (do_partial_partial) 2459 { 2460 bitmap_ones (worklist); 2461 mark_dfs_back_edges (); 2462 num_iterations = 0; 2463 changed = true; 2464 while (changed) 2465 { 2466 if (dump_file && (dump_flags & TDF_DETAILS)) 2467 fprintf (dump_file, "Starting iteration %d\n", num_iterations); 2468 num_iterations++; 2469 changed = false; 2470 for (i = postorder_num - 1 ; i >= 0; i--) 2471 { 2472 if (bitmap_bit_p (worklist, postorder[i])) 2473 { 2474 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]); 2475 bitmap_clear_bit (worklist, block->index); 2476 if (compute_partial_antic_aux (block, 2477 bitmap_bit_p (has_abnormal_preds, 2478 block->index))) 2479 { 2480 FOR_EACH_EDGE (e, ei, block->preds) 2481 bitmap_set_bit (worklist, e->src->index); 2482 changed = true; 2483 } 2484 } 2485 } 2486 /* Theoretically possible, but *highly* unlikely. */ 2487 gcc_checking_assert (num_iterations < 500); 2488 } 2489 statistics_histogram_event (cfun, "compute_partial_antic iterations", 2490 num_iterations); 2491 } 2492 sbitmap_free (has_abnormal_preds); 2493 sbitmap_free (worklist); 2494} 2495 2496 2497/* Inserted expressions are placed onto this worklist, which is used 2498 for performing quick dead code elimination of insertions we made 2499 that didn't turn out to be necessary. */ 2500static bitmap inserted_exprs; 2501 2502/* The actual worker for create_component_ref_by_pieces. 
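
   OPERAND indexes into REF->operands, outermost reference piece first;
   each case consumes the operands it owns and recurses to rebuild its
   base.  As a rough, hypothetical illustration, a.b[i_1] corresponds to
   an operand array along the lines of

     { ARRAY_REF <i_1>, COMPONENT_REF <b>, VAR_DECL <a> }

   where the ARRAY_REF case first rebuilds its base from the remaining
   operands and then re-assembles the array reference around it.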
*/ 2503 2504static tree 2505create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref, 2506 unsigned int *operand, gimple_seq *stmts) 2507{ 2508 vn_reference_op_t currop = &ref->operands[*operand]; 2509 tree genop; 2510 ++*operand; 2511 switch (currop->opcode) 2512 { 2513 case CALL_EXPR: 2514 { 2515 tree folded, sc = NULL_TREE; 2516 unsigned int nargs = 0; 2517 tree fn, *args; 2518 if (TREE_CODE (currop->op0) == FUNCTION_DECL) 2519 fn = currop->op0; 2520 else 2521 fn = find_or_generate_expression (block, currop->op0, stmts); 2522 if (!fn) 2523 return NULL_TREE; 2524 if (currop->op1) 2525 { 2526 sc = find_or_generate_expression (block, currop->op1, stmts); 2527 if (!sc) 2528 return NULL_TREE; 2529 } 2530 args = XNEWVEC (tree, ref->operands.length () - 1); 2531 while (*operand < ref->operands.length ()) 2532 { 2533 args[nargs] = create_component_ref_by_pieces_1 (block, ref, 2534 operand, stmts); 2535 if (!args[nargs]) 2536 return NULL_TREE; 2537 nargs++; 2538 } 2539 folded = build_call_array (currop->type, 2540 (TREE_CODE (fn) == FUNCTION_DECL 2541 ? build_fold_addr_expr (fn) : fn), 2542 nargs, args); 2543 if (currop->with_bounds) 2544 CALL_WITH_BOUNDS_P (folded) = true; 2545 free (args); 2546 if (sc) 2547 CALL_EXPR_STATIC_CHAIN (folded) = sc; 2548 return folded; 2549 } 2550 2551 case MEM_REF: 2552 { 2553 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, 2554 stmts); 2555 if (!baseop) 2556 return NULL_TREE; 2557 tree offset = currop->op0; 2558 if (TREE_CODE (baseop) == ADDR_EXPR 2559 && handled_component_p (TREE_OPERAND (baseop, 0))) 2560 { 2561 HOST_WIDE_INT off; 2562 tree base; 2563 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0), 2564 &off); 2565 gcc_assert (base); 2566 offset = int_const_binop (PLUS_EXPR, offset, 2567 build_int_cst (TREE_TYPE (offset), 2568 off)); 2569 baseop = build_fold_addr_expr (base); 2570 } 2571 return fold_build2 (MEM_REF, currop->type, baseop, offset); 2572 } 2573 2574 case TARGET_MEM_REF: 2575 { 2576 tree genop0 = NULL_TREE, genop1 = NULL_TREE; 2577 vn_reference_op_t nextop = &ref->operands[++*operand]; 2578 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, 2579 stmts); 2580 if (!baseop) 2581 return NULL_TREE; 2582 if (currop->op0) 2583 { 2584 genop0 = find_or_generate_expression (block, currop->op0, stmts); 2585 if (!genop0) 2586 return NULL_TREE; 2587 } 2588 if (nextop->op0) 2589 { 2590 genop1 = find_or_generate_expression (block, nextop->op0, stmts); 2591 if (!genop1) 2592 return NULL_TREE; 2593 } 2594 return build5 (TARGET_MEM_REF, currop->type, 2595 baseop, currop->op2, genop0, currop->op1, genop1); 2596 } 2597 2598 case ADDR_EXPR: 2599 if (currop->op0) 2600 { 2601 gcc_assert (is_gimple_min_invariant (currop->op0)); 2602 return currop->op0; 2603 } 2604 /* Fallthrough. 
 */
2605     case REALPART_EXPR:
2606     case IMAGPART_EXPR:
2607     case VIEW_CONVERT_EXPR:
2608       {
2609 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2610 							stmts);
2611 	if (!genop0)
2612 	  return NULL_TREE;
2613 	return fold_build1 (currop->opcode, currop->type, genop0);
2614       }
2615 
2616     case WITH_SIZE_EXPR:
2617       {
2618 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2619 							stmts);
2620 	if (!genop0)
2621 	  return NULL_TREE;
2622 	tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2623 	if (!genop1)
2624 	  return NULL_TREE;
2625 	return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2626       }
2627 
2628     case BIT_FIELD_REF:
2629       {
2630 	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2631 							stmts);
2632 	if (!genop0)
2633 	  return NULL_TREE;
2634 	tree op1 = currop->op0;
2635 	tree op2 = currop->op1;
2636 	return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2637       }
2638 
2639       /* For array ref vn_reference_op's, operand 1 of the array ref
2640 	 is op0 of the reference op and operand 3 of the array ref is
2641 	 op1.  */
2642     case ARRAY_RANGE_REF:
2643     case ARRAY_REF:
2644       {
2645 	tree genop0;
2646 	tree genop1 = currop->op0;
2647 	tree genop2 = currop->op1;
2648 	tree genop3 = currop->op2;
2649 	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2650 						   stmts);
2651 	if (!genop0)
2652 	  return NULL_TREE;
2653 	genop1 = find_or_generate_expression (block, genop1, stmts);
2654 	if (!genop1)
2655 	  return NULL_TREE;
2656 	if (genop2)
2657 	  {
2658 	    tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2659 	    /* Drop zero minimum index if redundant.  */
2660 	    if (integer_zerop (genop2)
2661 		&& (!domain_type
2662 		    || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2663 	      genop2 = NULL_TREE;
2664 	    else
2665 	      {
2666 		genop2 = find_or_generate_expression (block, genop2, stmts);
2667 		if (!genop2)
2668 		  return NULL_TREE;
2669 	      }
2670 	  }
2671 	if (genop3)
2672 	  {
2673 	    tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2674 	    /* We can't always put a size in units of the element alignment
2675 	       here as the element alignment may not be visible.  See
2676 	       PR43783.  Simply drop the element size for constant
2677 	       sizes.  */
2678 	    if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2679 	      genop3 = NULL_TREE;
2680 	    else
2681 	      {
2682 		genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2683 				     size_int (TYPE_ALIGN_UNIT (elmt_type)));
2684 		genop3 = find_or_generate_expression (block, genop3, stmts);
2685 		if (!genop3)
2686 		  return NULL_TREE;
2687 	      }
2688 	  }
2689 	return build4 (currop->opcode, currop->type, genop0, genop1,
2690 		       genop2, genop3);
2691       }
2692     case COMPONENT_REF:
2693       {
2694 	tree op0;
2695 	tree op1;
2696 	tree genop2 = currop->op1;
2697 	op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2698 	if (!op0)
2699 	  return NULL_TREE;
2700 	/* op1 should be a FIELD_DECL, which is represented by itself.
 */
2701 	op1 = currop->op0;
2702 	if (genop2)
2703 	  {
2704 	    genop2 = find_or_generate_expression (block, genop2, stmts);
2705 	    if (!genop2)
2706 	      return NULL_TREE;
2707 	  }
2708 	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2709       }
2710 
2711     case SSA_NAME:
2712       {
2713 	genop = find_or_generate_expression (block, currop->op0, stmts);
2714 	return genop;
2715       }
2716     case STRING_CST:
2717     case INTEGER_CST:
2718     case COMPLEX_CST:
2719     case VECTOR_CST:
2720     case REAL_CST:
2721     case CONSTRUCTOR:
2722     case VAR_DECL:
2723     case PARM_DECL:
2724     case CONST_DECL:
2725     case RESULT_DECL:
2726     case FUNCTION_DECL:
2727       return currop->op0;
2728 
2729     default:
2730       gcc_unreachable ();
2731     }
2732 }
2733 
2734 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2735    COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2736    trying to rename aggregates into ssa form directly, which is a no-no.
2737 
2738    Thus, this routine doesn't create temporaries, it just builds a
2739    single access expression for the array, calling
2740    find_or_generate_expression to build the innermost pieces.
2741 
2742    This function is a subroutine of create_expression_by_pieces, and
2743    should not be called on its own unless you really know what you
2744    are doing.  */
2745 
2746 static tree
2747 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2748 				gimple_seq *stmts)
2749 {
2750   unsigned int op = 0;
2751   return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2752 }
2753 
2754 /* Find a simple leader for an expression, or generate one using
2755    create_expression_by_pieces from a NARY expression for the value.
2756    BLOCK is the basic_block we are looking for leaders in.
2757    OP is the tree expression to find a leader for or generate.
2758    Returns the leader or NULL_TREE on failure.  */
2759 
2760 static tree
2761 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2762 {
2763   pre_expr expr = get_or_alloc_expr_for (op);
2764   unsigned int lookfor = get_expr_value_id (expr);
2765   pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2766   if (leader)
2767     {
2768       if (leader->kind == NAME)
2769 	return PRE_EXPR_NAME (leader);
2770       else if (leader->kind == CONSTANT)
2771 	return PRE_EXPR_CONSTANT (leader);
2772 
2773       /* Defer.  */
2774       return NULL_TREE;
2775     }
2776 
2777   /* It must be a complex expression, so generate it recursively.  Note
2778      that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2779      where the insert algorithm fails to insert a required expression.  */
2780   bitmap exprset = value_expressions[lookfor];
2781   bitmap_iterator bi;
2782   unsigned int i;
2783   EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2784     {
2785       pre_expr temp = expression_for_id (i);
2786       /* We cannot insert random REFERENCE expressions at arbitrary
2787 	 places.  We can insert NARYs which eventually re-materialize
2788 	 their operand values.  */
2789       if (temp->kind == NARY)
2790 	return create_expression_by_pieces (block, temp, stmts,
2791 					    get_expr_type (expr));
2792     }
2793 
2794   /* Defer.  */
2795   return NULL_TREE;
2796 }
2797 
2798 #define NECESSARY GF_PLF_1
2799 
2800 /* Create an expression in pieces, so that we can handle very complex
2801    expressions that may be ANTIC, but not necessarily GIMPLE.
2802    BLOCK is the basic block the expression will be inserted into,
2803    EXPR is the expression to insert (in value form)
2804    STMTS is a statement list to append the necessary insertions into.
2805 2806 This function will die if we hit some value that shouldn't be 2807 ANTIC but is (IE there is no leader for it, or its components). 2808 The function returns NULL_TREE in case a different antic expression 2809 has to be inserted first. 2810 This function may also generate expressions that are themselves 2811 partially or fully redundant. Those that are will be either made 2812 fully redundant during the next iteration of insert (for partially 2813 redundant ones), or eliminated by eliminate (for fully redundant 2814 ones). */ 2815 2816static tree 2817create_expression_by_pieces (basic_block block, pre_expr expr, 2818 gimple_seq *stmts, tree type) 2819{ 2820 tree name; 2821 tree folded; 2822 gimple_seq forced_stmts = NULL; 2823 unsigned int value_id; 2824 gimple_stmt_iterator gsi; 2825 tree exprtype = type ? type : get_expr_type (expr); 2826 pre_expr nameexpr; 2827 gassign *newstmt; 2828 2829 switch (expr->kind) 2830 { 2831 /* We may hit the NAME/CONSTANT case if we have to convert types 2832 that value numbering saw through. */ 2833 case NAME: 2834 folded = PRE_EXPR_NAME (expr); 2835 break; 2836 case CONSTANT: 2837 folded = PRE_EXPR_CONSTANT (expr); 2838 break; 2839 case REFERENCE: 2840 { 2841 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); 2842 folded = create_component_ref_by_pieces (block, ref, stmts); 2843 if (!folded) 2844 return NULL_TREE; 2845 } 2846 break; 2847 case NARY: 2848 { 2849 vn_nary_op_t nary = PRE_EXPR_NARY (expr); 2850 tree *genop = XALLOCAVEC (tree, nary->length); 2851 unsigned i; 2852 for (i = 0; i < nary->length; ++i) 2853 { 2854 genop[i] = find_or_generate_expression (block, nary->op[i], stmts); 2855 if (!genop[i]) 2856 return NULL_TREE; 2857 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It 2858 may have conversions stripped. */ 2859 if (nary->opcode == POINTER_PLUS_EXPR) 2860 { 2861 if (i == 0) 2862 genop[i] = gimple_convert (&forced_stmts, 2863 nary->type, genop[i]); 2864 else if (i == 1) 2865 genop[i] = gimple_convert (&forced_stmts, 2866 sizetype, genop[i]); 2867 } 2868 else 2869 genop[i] = gimple_convert (&forced_stmts, 2870 TREE_TYPE (nary->op[i]), genop[i]); 2871 } 2872 if (nary->opcode == CONSTRUCTOR) 2873 { 2874 vec<constructor_elt, va_gc> *elts = NULL; 2875 for (i = 0; i < nary->length; ++i) 2876 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]); 2877 folded = build_constructor (nary->type, elts); 2878 } 2879 else 2880 { 2881 switch (nary->length) 2882 { 2883 case 1: 2884 folded = fold_build1 (nary->opcode, nary->type, 2885 genop[0]); 2886 break; 2887 case 2: 2888 folded = fold_build2 (nary->opcode, nary->type, 2889 genop[0], genop[1]); 2890 break; 2891 case 3: 2892 folded = fold_build3 (nary->opcode, nary->type, 2893 genop[0], genop[1], genop[2]); 2894 break; 2895 default: 2896 gcc_unreachable (); 2897 } 2898 } 2899 } 2900 break; 2901 default: 2902 gcc_unreachable (); 2903 } 2904 2905 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded))) 2906 folded = fold_convert (exprtype, folded); 2907 2908 /* Force the generated expression to be a sequence of GIMPLE 2909 statements. 2910 We have to call unshare_expr because force_gimple_operand may 2911 modify the tree we pass to it. */ 2912 gimple_seq tem = NULL; 2913 folded = force_gimple_operand (unshare_expr (folded), &tem, 2914 false, NULL); 2915 gimple_seq_add_seq_without_update (&forced_stmts, tem); 2916 2917 /* If we have any intermediate expressions to the value sets, add them 2918 to the value sets and chain them in the instruction stream. 
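   E.g. (illustrative only) materializing the value of a_1 * b_2 + c_3
   may force an intermediate statement

     pretmp_4 = a_1 * b_2;

   whose LHS must itself be value numbered and made available.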
*/ 2919 if (forced_stmts) 2920 { 2921 gsi = gsi_start (forced_stmts); 2922 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 2923 { 2924 gimple stmt = gsi_stmt (gsi); 2925 tree forcedname = gimple_get_lhs (stmt); 2926 pre_expr nameexpr; 2927 2928 if (TREE_CODE (forcedname) == SSA_NAME) 2929 { 2930 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname)); 2931 VN_INFO_GET (forcedname)->valnum = forcedname; 2932 VN_INFO (forcedname)->value_id = get_next_value_id (); 2933 nameexpr = get_or_alloc_expr_for_name (forcedname); 2934 add_to_value (VN_INFO (forcedname)->value_id, nameexpr); 2935 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr); 2936 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr); 2937 } 2938 2939 gimple_set_vuse (stmt, BB_LIVE_VOP_ON_EXIT (block)); 2940 gimple_set_modified (stmt, true); 2941 } 2942 gimple_seq_add_seq (stmts, forced_stmts); 2943 } 2944 2945 name = make_temp_ssa_name (exprtype, NULL, "pretmp"); 2946 newstmt = gimple_build_assign (name, folded); 2947 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block)); 2948 gimple_set_modified (newstmt, true); 2949 gimple_set_plf (newstmt, NECESSARY, false); 2950 2951 gimple_seq_add_stmt (stmts, newstmt); 2952 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name)); 2953 2954 /* Fold the last statement. */ 2955 gsi = gsi_last (*stmts); 2956 if (fold_stmt_inplace (&gsi)) 2957 update_stmt (gsi_stmt (gsi)); 2958 2959 /* Add a value number to the temporary. 2960 The value may already exist in either NEW_SETS, or AVAIL_OUT, because 2961 we are creating the expression by pieces, and this particular piece of 2962 the expression may have been represented. There is no harm in replacing 2963 here. */ 2964 value_id = get_expr_value_id (expr); 2965 VN_INFO_GET (name)->value_id = value_id; 2966 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id); 2967 if (VN_INFO (name)->valnum == NULL_TREE) 2968 VN_INFO (name)->valnum = name; 2969 gcc_assert (VN_INFO (name)->valnum != NULL_TREE); 2970 nameexpr = get_or_alloc_expr_for_name (name); 2971 add_to_value (value_id, nameexpr); 2972 if (NEW_SETS (block)) 2973 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr); 2974 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr); 2975 2976 pre_stats.insertions++; 2977 if (dump_file && (dump_flags & TDF_DETAILS)) 2978 { 2979 fprintf (dump_file, "Inserted "); 2980 print_gimple_stmt (dump_file, newstmt, 0, 0); 2981 fprintf (dump_file, " in predecessor %d (%04d)\n", 2982 block->index, value_id); 2983 } 2984 2985 return name; 2986} 2987 2988 2989/* Insert the to-be-made-available values of expression EXPRNUM for each 2990 predecessor, stored in AVAIL, into the predecessors of BLOCK, and 2991 merge the result with a phi node, given the same value number as 2992 NODE. Return true if we have inserted new stuff. */ 2993 2994static bool 2995insert_into_preds_of_block (basic_block block, unsigned int exprnum, 2996 vec<pre_expr> avail) 2997{ 2998 pre_expr expr = expression_for_id (exprnum); 2999 pre_expr newphi; 3000 unsigned int val = get_expr_value_id (expr); 3001 edge pred; 3002 bool insertions = false; 3003 bool nophi = false; 3004 basic_block bprime; 3005 pre_expr eprime; 3006 edge_iterator ei; 3007 tree type = get_expr_type (expr); 3008 tree temp; 3009 gphi *phi; 3010 3011 /* Make sure we aren't creating an induction variable. 
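     E.g. (simplified sketch): with preds { preheader, latch } of a loop
     header, inserting the expression into the latch and merging it via

       prephitmp_1 = PHI <pretmp_2 (preheader), pretmp_3 (latch)>

     would manufacture a new induction variable rather than remove a
     redundancy, so we refuse to create the PHI in that shape.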
*/ 3012 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2) 3013 { 3014 bool firstinsideloop = false; 3015 bool secondinsideloop = false; 3016 firstinsideloop = flow_bb_inside_loop_p (block->loop_father, 3017 EDGE_PRED (block, 0)->src); 3018 secondinsideloop = flow_bb_inside_loop_p (block->loop_father, 3019 EDGE_PRED (block, 1)->src); 3020 /* Induction variables only have one edge inside the loop. */ 3021 if ((firstinsideloop ^ secondinsideloop) 3022 && expr->kind != REFERENCE) 3023 { 3024 if (dump_file && (dump_flags & TDF_DETAILS)) 3025 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n"); 3026 nophi = true; 3027 } 3028 } 3029 3030 /* Make the necessary insertions. */ 3031 FOR_EACH_EDGE (pred, ei, block->preds) 3032 { 3033 gimple_seq stmts = NULL; 3034 tree builtexpr; 3035 bprime = pred->src; 3036 eprime = avail[pred->dest_idx]; 3037 3038 if (eprime->kind != NAME && eprime->kind != CONSTANT) 3039 { 3040 builtexpr = create_expression_by_pieces (bprime, eprime, 3041 &stmts, type); 3042 gcc_assert (!(pred->flags & EDGE_ABNORMAL)); 3043 gsi_insert_seq_on_edge (pred, stmts); 3044 if (!builtexpr) 3045 { 3046 /* We cannot insert a PHI node if we failed to insert 3047 on one edge. */ 3048 nophi = true; 3049 continue; 3050 } 3051 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr); 3052 insertions = true; 3053 } 3054 else if (eprime->kind == CONSTANT) 3055 { 3056 /* Constants may not have the right type, fold_convert 3057 should give us back a constant with the right type. */ 3058 tree constant = PRE_EXPR_CONSTANT (eprime); 3059 if (!useless_type_conversion_p (type, TREE_TYPE (constant))) 3060 { 3061 tree builtexpr = fold_convert (type, constant); 3062 if (!is_gimple_min_invariant (builtexpr)) 3063 { 3064 tree forcedexpr = force_gimple_operand (builtexpr, 3065 &stmts, true, 3066 NULL); 3067 if (!is_gimple_min_invariant (forcedexpr)) 3068 { 3069 if (forcedexpr != builtexpr) 3070 { 3071 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime); 3072 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime); 3073 } 3074 if (stmts) 3075 { 3076 gimple_stmt_iterator gsi; 3077 gsi = gsi_start (stmts); 3078 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 3079 { 3080 gimple stmt = gsi_stmt (gsi); 3081 tree lhs = gimple_get_lhs (stmt); 3082 if (TREE_CODE (lhs) == SSA_NAME) 3083 bitmap_set_bit (inserted_exprs, 3084 SSA_NAME_VERSION (lhs)); 3085 gimple_set_plf (stmt, NECESSARY, false); 3086 } 3087 gsi_insert_seq_on_edge (pred, stmts); 3088 } 3089 avail[pred->dest_idx] 3090 = get_or_alloc_expr_for_name (forcedexpr); 3091 } 3092 } 3093 else 3094 avail[pred->dest_idx] 3095 = get_or_alloc_expr_for_constant (builtexpr); 3096 } 3097 } 3098 else if (eprime->kind == NAME) 3099 { 3100 /* We may have to do a conversion because our value 3101 numbering can look through types in certain cases, but 3102 our IL requires all operands of a phi node have the same 3103 type. 
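	 E.g. (illustration) if value numbering proved that s_1 and its
	 widening (int) s_1 share a value, the available leader can be
	 the narrower s_1 while the PHI argument needs type int, so we
	 force

	   pretmp_2 = (int) s_1;

	 onto the edge and use pretmp_2 as the argument instead.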
 */
3104 	  tree name = PRE_EXPR_NAME (eprime);
3105 	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3106 	    {
3107 	      tree builtexpr;
3108 	      tree forcedexpr;
3109 	      builtexpr = fold_convert (type, name);
3110 	      forcedexpr = force_gimple_operand (builtexpr,
3111 						 &stmts, true,
3112 						 NULL);
3113 
3114 	      if (forcedexpr != name)
3115 		{
3116 		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3117 		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3118 		}
3119 
3120 	      if (stmts)
3121 		{
3122 		  gimple_stmt_iterator gsi;
3123 		  gsi = gsi_start (stmts);
3124 		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3125 		    {
3126 		      gimple stmt = gsi_stmt (gsi);
3127 		      tree lhs = gimple_get_lhs (stmt);
3128 		      if (TREE_CODE (lhs) == SSA_NAME)
3129 			bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3130 		      gimple_set_plf (stmt, NECESSARY, false);
3131 		    }
3132 		  gsi_insert_seq_on_edge (pred, stmts);
3133 		}
3134 	      avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
3135 	    }
3136 	}
3137     }
3138   /* If we didn't want a phi node, and we made insertions, we still have
3139      inserted new stuff, and thus return true.  If we didn't want a phi node,
3140      and didn't make insertions, we haven't added anything new, so return
3141      false.  */
3142   if (nophi && insertions)
3143     return true;
3144   else if (nophi && !insertions)
3145     return false;
3146 
3147   /* Now build a phi for the new variable.  */
3148   temp = make_temp_ssa_name (type, NULL, "prephitmp");
3149   phi = create_phi_node (temp, block);
3150 
3151   gimple_set_plf (phi, NECESSARY, false);
3152   VN_INFO_GET (temp)->value_id = val;
3153   VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3154   if (VN_INFO (temp)->valnum == NULL_TREE)
3155     VN_INFO (temp)->valnum = temp;
3156   bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3157   FOR_EACH_EDGE (pred, ei, block->preds)
3158     {
3159       pre_expr ae = avail[pred->dest_idx];
3160       gcc_assert (get_expr_type (ae) == type
3161 		  || useless_type_conversion_p (type, get_expr_type (ae)));
3162       if (ae->kind == CONSTANT)
3163 	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3164 		     pred, UNKNOWN_LOCATION);
3165       else
3166 	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3167     }
3168 
3169   newphi = get_or_alloc_expr_for_name (temp);
3170   add_to_value (val, newphi);
3171 
3172   /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3173      this insertion, since we test for the existence of this value in PHI_GEN
3174      before proceeding with the partial redundancy checks in insert_aux.
3175 
3176      The value may exist in AVAIL_OUT, in particular, it could be represented
3177      by the expression we are trying to eliminate, in which case we want the
3178      replacement to occur.  If it does not exist in AVAIL_OUT, we want it
3179      inserted there.
3180 
3181      Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3182      this block, because if it did, it would have existed in our dominator's
3183      AVAIL_OUT, and would have been skipped due to the full redundancy check.
3184   */
3185 
3186   bitmap_insert_into_set (PHI_GEN (block), newphi);
3187   bitmap_value_replace_in_set (AVAIL_OUT (block),
3188 			       newphi);
3189   bitmap_insert_into_set (NEW_SETS (block),
3190 			  newphi);
3191 
3192   /* If we insert a PHI node for a conversion of another PHI node
3193      in the same basic-block try to preserve range information.
3194      This is important so that follow-up loop passes receive optimal
3195      number-of-iterations analysis results.  See PR61743.
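     E.g. (simplified): for

       short s_1 = PHI <...>;
       prephitmp_2 = (int) s_1;

     with s_1 known to lie in [0, 100], the new PHI for prephitmp_2 can
     inherit the range [0, 100] extended to the wider type.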
 */
3196   if (expr->kind == NARY
3197       && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3198       && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3199       && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3200       && INTEGRAL_TYPE_P (type)
3201       && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3202       && (TYPE_PRECISION (type)
3203 	  >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3204       && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3205     {
3206       wide_int min, max;
3207       if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3208 	  && !wi::neg_p (min, SIGNED)
3209 	  && !wi::neg_p (max, SIGNED))
3210 	/* Just handle extension and sign-changes of all-positive ranges.  */
3211 	set_range_info (temp,
3212 			SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3213 			wide_int_storage::from (min, TYPE_PRECISION (type),
3214 						TYPE_SIGN (type)),
3215 			wide_int_storage::from (max, TYPE_PRECISION (type),
3216 						TYPE_SIGN (type)));
3217     }
3218 
3219   if (dump_file && (dump_flags & TDF_DETAILS))
3220     {
3221       fprintf (dump_file, "Created phi ");
3222       print_gimple_stmt (dump_file, phi, 0, 0);
3223       fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3224     }
3225   pre_stats.phis++;
3226   return true;
3227 }
3228 
3229 
3230 
3231 /* Perform insertion of partially redundant values.
3232    For BLOCK, do the following:
3233    1.  Propagate the NEW_SETS of the dominator into the current block.
3234    If the block has multiple predecessors,
3235        2a. Iterate over the ANTIC expressions for the block to see if
3236 	   any of them are partially redundant.
3237        2b. If so, insert them into the necessary predecessors to make
3238 	   the expression fully redundant.
3239        2c. Insert a new PHI merging the values of the predecessors.
3240        2d. Insert the new PHI, and the new expressions, into the
3241 	   NEW_SETS set.
3242    3. Recursively call ourselves on the dominator children of BLOCK.
3243 
3244    Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3245    do_regular_insertion and do_partial_partial_insertion.
3246 
3247 */
3248 
3249 static bool
3250 do_regular_insertion (basic_block block, basic_block dom)
3251 {
3252   bool new_stuff = false;
3253   vec<pre_expr> exprs;
3254   pre_expr expr;
3255   auto_vec<pre_expr> avail;
3256   int i;
3257 
3258   exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3259   avail.safe_grow (EDGE_COUNT (block->preds));
3260 
3261   FOR_EACH_VEC_ELT (exprs, i, expr)
3262     {
3263       if (expr->kind == NARY
3264 	  || expr->kind == REFERENCE)
3265 	{
3266 	  unsigned int val;
3267 	  bool by_some = false;
3268 	  bool cant_insert = false;
3269 	  bool all_same = true;
3270 	  pre_expr first_s = NULL;
3271 	  edge pred;
3272 	  basic_block bprime;
3273 	  pre_expr eprime = NULL;
3274 	  edge_iterator ei;
3275 	  pre_expr edoubleprime = NULL;
3276 	  bool do_insertion = false;
3277 
3278 	  val = get_expr_value_id (expr);
3279 	  if (bitmap_set_contains_value (PHI_GEN (block), val))
3280 	    continue;
3281 	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3282 	    {
3283 	      if (dump_file && (dump_flags & TDF_DETAILS))
3284 		{
3285 		  fprintf (dump_file, "Found fully redundant value: ");
3286 		  print_pre_expr (dump_file, expr);
3287 		  fprintf (dump_file, "\n");
3288 		}
3289 	      continue;
3290 	    }
3291 
3292 	  FOR_EACH_EDGE (pred, ei, block->preds)
3293 	    {
3294 	      unsigned int vprime;
3295 
3296 	      /* We should never run insertion for the exit block
3297 		 and so not come across fake pred edges.
*/ 3298 gcc_assert (!(pred->flags & EDGE_FAKE)); 3299 bprime = pred->src; 3300 eprime = phi_translate (expr, ANTIC_IN (block), NULL, 3301 bprime, block); 3302 3303 /* eprime will generally only be NULL if the 3304 value of the expression, translated 3305 through the PHI for this predecessor, is 3306 undefined. If that is the case, we can't 3307 make the expression fully redundant, 3308 because its value is undefined along a 3309 predecessor path. We can thus break out 3310 early because it doesn't matter what the 3311 rest of the results are. */ 3312 if (eprime == NULL) 3313 { 3314 avail[pred->dest_idx] = NULL; 3315 cant_insert = true; 3316 break; 3317 } 3318 3319 eprime = fully_constant_expression (eprime); 3320 vprime = get_expr_value_id (eprime); 3321 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), 3322 vprime); 3323 if (edoubleprime == NULL) 3324 { 3325 avail[pred->dest_idx] = eprime; 3326 all_same = false; 3327 } 3328 else 3329 { 3330 avail[pred->dest_idx] = edoubleprime; 3331 by_some = true; 3332 /* We want to perform insertions to remove a redundancy on 3333 a path in the CFG we want to optimize for speed. */ 3334 if (optimize_edge_for_speed_p (pred)) 3335 do_insertion = true; 3336 if (first_s == NULL) 3337 first_s = edoubleprime; 3338 else if (!pre_expr_d::equal (first_s, edoubleprime)) 3339 all_same = false; 3340 } 3341 } 3342 /* If we can insert it, it's not the same value 3343 already existing along every predecessor, and 3344 it's defined by some predecessor, it is 3345 partially redundant. */ 3346 if (!cant_insert && !all_same && by_some) 3347 { 3348 if (!do_insertion) 3349 { 3350 if (dump_file && (dump_flags & TDF_DETAILS)) 3351 { 3352 fprintf (dump_file, "Skipping partial redundancy for " 3353 "expression "); 3354 print_pre_expr (dump_file, expr); 3355 fprintf (dump_file, " (%04d), no redundancy on to be " 3356 "optimized for speed edge\n", val); 3357 } 3358 } 3359 else if (dbg_cnt (treepre_insert)) 3360 { 3361 if (dump_file && (dump_flags & TDF_DETAILS)) 3362 { 3363 fprintf (dump_file, "Found partial redundancy for " 3364 "expression "); 3365 print_pre_expr (dump_file, expr); 3366 fprintf (dump_file, " (%04d)\n", 3367 get_expr_value_id (expr)); 3368 } 3369 if (insert_into_preds_of_block (block, 3370 get_expression_id (expr), 3371 avail)) 3372 new_stuff = true; 3373 } 3374 } 3375 /* If all edges produce the same value and that value is 3376 an invariant, then the PHI has the same value on all 3377 edges. Note this. */ 3378 else if (!cant_insert && all_same) 3379 { 3380 gcc_assert (edoubleprime->kind == CONSTANT 3381 || edoubleprime->kind == NAME); 3382 3383 tree temp = make_temp_ssa_name (get_expr_type (expr), 3384 NULL, "pretmp"); 3385 gassign *assign 3386 = gimple_build_assign (temp, 3387 edoubleprime->kind == CONSTANT ? 
3388 PRE_EXPR_CONSTANT (edoubleprime) : 3389 PRE_EXPR_NAME (edoubleprime)); 3390 gimple_stmt_iterator gsi = gsi_after_labels (block); 3391 gsi_insert_before (&gsi, assign, GSI_NEW_STMT); 3392 3393 gimple_set_plf (assign, NECESSARY, false); 3394 VN_INFO_GET (temp)->value_id = val; 3395 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val); 3396 if (VN_INFO (temp)->valnum == NULL_TREE) 3397 VN_INFO (temp)->valnum = temp; 3398 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp)); 3399 pre_expr newe = get_or_alloc_expr_for_name (temp); 3400 add_to_value (val, newe); 3401 bitmap_value_replace_in_set (AVAIL_OUT (block), newe); 3402 bitmap_insert_into_set (NEW_SETS (block), newe); 3403 } 3404 } 3405 } 3406 3407 exprs.release (); 3408 return new_stuff; 3409} 3410 3411 3412/* Perform insertion for partially anticipatable expressions. There 3413 is only one case we will perform insertion for these. This case is 3414 if the expression is partially anticipatable, and fully available. 3415 In this case, we know that putting it earlier will enable us to 3416 remove the later computation. */ 3417 3418 3419static bool 3420do_partial_partial_insertion (basic_block block, basic_block dom) 3421{ 3422 bool new_stuff = false; 3423 vec<pre_expr> exprs; 3424 pre_expr expr; 3425 auto_vec<pre_expr> avail; 3426 int i; 3427 3428 exprs = sorted_array_from_bitmap_set (PA_IN (block)); 3429 avail.safe_grow (EDGE_COUNT (block->preds)); 3430 3431 FOR_EACH_VEC_ELT (exprs, i, expr) 3432 { 3433 if (expr->kind == NARY 3434 || expr->kind == REFERENCE) 3435 { 3436 unsigned int val; 3437 bool by_all = true; 3438 bool cant_insert = false; 3439 edge pred; 3440 basic_block bprime; 3441 pre_expr eprime = NULL; 3442 edge_iterator ei; 3443 3444 val = get_expr_value_id (expr); 3445 if (bitmap_set_contains_value (PHI_GEN (block), val)) 3446 continue; 3447 if (bitmap_set_contains_value (AVAIL_OUT (dom), val)) 3448 continue; 3449 3450 FOR_EACH_EDGE (pred, ei, block->preds) 3451 { 3452 unsigned int vprime; 3453 pre_expr edoubleprime; 3454 3455 /* We should never run insertion for the exit block 3456 and so not come across fake pred edges. */ 3457 gcc_assert (!(pred->flags & EDGE_FAKE)); 3458 bprime = pred->src; 3459 eprime = phi_translate (expr, ANTIC_IN (block), 3460 PA_IN (block), 3461 bprime, block); 3462 3463 /* eprime will generally only be NULL if the 3464 value of the expression, translated 3465 through the PHI for this predecessor, is 3466 undefined. If that is the case, we can't 3467 make the expression fully redundant, 3468 because its value is undefined along a 3469 predecessor path. We can thus break out 3470 early because it doesn't matter what the 3471 rest of the results are. */ 3472 if (eprime == NULL) 3473 { 3474 avail[pred->dest_idx] = NULL; 3475 cant_insert = true; 3476 break; 3477 } 3478 3479 eprime = fully_constant_expression (eprime); 3480 vprime = get_expr_value_id (eprime); 3481 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime); 3482 avail[pred->dest_idx] = edoubleprime; 3483 if (edoubleprime == NULL) 3484 { 3485 by_all = false; 3486 break; 3487 } 3488 } 3489 3490 /* If we can insert it, it's not the same value 3491 already existing along every predecessor, and 3492 it's defined by some predecessor, it is 3493 partially redundant. */ 3494 if (!cant_insert && by_all) 3495 { 3496 edge succ; 3497 bool do_insertion = false; 3498 3499 /* Insert only if we can remove a later expression on a path 3500 that we want to optimize for speed. 
3501 The phi node that we will be inserting in BLOCK is not free, 3502 and inserting it for the sake of !optimize_for_speed successor 3503 may cause regressions on the speed path. */ 3504 FOR_EACH_EDGE (succ, ei, block->succs) 3505 { 3506 if (bitmap_set_contains_value (PA_IN (succ->dest), val) 3507 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val)) 3508 { 3509 if (optimize_edge_for_speed_p (succ)) 3510 do_insertion = true; 3511 } 3512 } 3513 3514 if (!do_insertion) 3515 { 3516 if (dump_file && (dump_flags & TDF_DETAILS)) 3517 { 3518 fprintf (dump_file, "Skipping partial partial redundancy " 3519 "for expression "); 3520 print_pre_expr (dump_file, expr); 3521 fprintf (dump_file, " (%04d), not (partially) anticipated " 3522 "on any to be optimized for speed edges\n", val); 3523 } 3524 } 3525 else if (dbg_cnt (treepre_insert)) 3526 { 3527 pre_stats.pa_insert++; 3528 if (dump_file && (dump_flags & TDF_DETAILS)) 3529 { 3530 fprintf (dump_file, "Found partial partial redundancy " 3531 "for expression "); 3532 print_pre_expr (dump_file, expr); 3533 fprintf (dump_file, " (%04d)\n", 3534 get_expr_value_id (expr)); 3535 } 3536 if (insert_into_preds_of_block (block, 3537 get_expression_id (expr), 3538 avail)) 3539 new_stuff = true; 3540 } 3541 } 3542 } 3543 } 3544 3545 exprs.release (); 3546 return new_stuff; 3547} 3548 3549static bool 3550insert_aux (basic_block block) 3551{ 3552 basic_block son; 3553 bool new_stuff = false; 3554 3555 if (block) 3556 { 3557 basic_block dom; 3558 dom = get_immediate_dominator (CDI_DOMINATORS, block); 3559 if (dom) 3560 { 3561 unsigned i; 3562 bitmap_iterator bi; 3563 bitmap_set_t newset = NEW_SETS (dom); 3564 if (newset) 3565 { 3566 /* Note that we need to value_replace both NEW_SETS, and 3567 AVAIL_OUT. For both the case of NEW_SETS, the value may be 3568 represented by some non-simple expression here that we want 3569 to replace it with. */ 3570 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi) 3571 { 3572 pre_expr expr = expression_for_id (i); 3573 bitmap_value_replace_in_set (NEW_SETS (block), expr); 3574 bitmap_value_replace_in_set (AVAIL_OUT (block), expr); 3575 } 3576 } 3577 if (!single_pred_p (block)) 3578 { 3579 new_stuff |= do_regular_insertion (block, dom); 3580 if (do_partial_partial) 3581 new_stuff |= do_partial_partial_insertion (block, dom); 3582 } 3583 } 3584 } 3585 for (son = first_dom_son (CDI_DOMINATORS, block); 3586 son; 3587 son = next_dom_son (CDI_DOMINATORS, son)) 3588 { 3589 new_stuff |= insert_aux (son); 3590 } 3591 3592 return new_stuff; 3593} 3594 3595/* Perform insertion of partially redundant values. */ 3596 3597static void 3598insert (void) 3599{ 3600 bool new_stuff = true; 3601 basic_block bb; 3602 int num_iterations = 0; 3603 3604 FOR_ALL_BB_FN (bb, cfun) 3605 NEW_SETS (bb) = bitmap_set_new (); 3606 3607 while (new_stuff) 3608 { 3609 num_iterations++; 3610 if (dump_file && dump_flags & TDF_DETAILS) 3611 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations); 3612 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun)); 3613 3614 /* Clear the NEW sets before the next iteration. We have already 3615 fully propagated its contents. */ 3616 if (new_stuff) 3617 FOR_ALL_BB_FN (bb, cfun) 3618 bitmap_set_free (NEW_SETS (bb)); 3619 } 3620 statistics_histogram_event (cfun, "insert iterations", num_iterations); 3621} 3622 3623 3624/* Compute the AVAIL set for all basic blocks. 3625 3626 This function performs value numbering of the statements in each basic 3627 block. 
The AVAIL sets are built from information we glean while doing 3628 this value numbering, since the AVAIL sets contain only one entry per 3629 value. 3630 3631 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)]. 3632 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */ 3633 3634static void 3635compute_avail (void) 3636{ 3637 3638 basic_block block, son; 3639 basic_block *worklist; 3640 size_t sp = 0; 3641 unsigned i; 3642 3643 /* We pretend that default definitions are defined in the entry block. 3644 This includes function arguments and the static chain decl. */ 3645 for (i = 1; i < num_ssa_names; ++i) 3646 { 3647 tree name = ssa_name (i); 3648 pre_expr e; 3649 if (!name 3650 || !SSA_NAME_IS_DEFAULT_DEF (name) 3651 || has_zero_uses (name) 3652 || virtual_operand_p (name)) 3653 continue; 3654 3655 e = get_or_alloc_expr_for_name (name); 3656 add_to_value (get_expr_value_id (e), e); 3657 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e); 3658 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)), 3659 e); 3660 } 3661 3662 if (dump_file && (dump_flags & TDF_DETAILS)) 3663 { 3664 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), 3665 "tmp_gen", ENTRY_BLOCK); 3666 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)), 3667 "avail_out", ENTRY_BLOCK); 3668 } 3669 3670 /* Allocate the worklist. */ 3671 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun)); 3672 3673 /* Seed the algorithm by putting the dominator children of the entry 3674 block on the worklist. */ 3675 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun)); 3676 son; 3677 son = next_dom_son (CDI_DOMINATORS, son)) 3678 worklist[sp++] = son; 3679 3680 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun)) 3681 = ssa_default_def (cfun, gimple_vop (cfun)); 3682 3683 /* Loop until the worklist is empty. */ 3684 while (sp) 3685 { 3686 gimple stmt; 3687 basic_block dom; 3688 3689 /* Pick a block from the worklist. */ 3690 block = worklist[--sp]; 3691 3692 /* Initially, the set of available values in BLOCK is that of 3693 its immediate dominator. */ 3694 dom = get_immediate_dominator (CDI_DOMINATORS, block); 3695 if (dom) 3696 { 3697 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom)); 3698 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom); 3699 } 3700 3701 /* Generate values for PHI nodes. */ 3702 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi); 3703 gsi_next (&gsi)) 3704 { 3705 tree result = gimple_phi_result (gsi.phi ()); 3706 3707 /* We have no need for virtual phis, as they don't represent 3708 actual computations. */ 3709 if (virtual_operand_p (result)) 3710 { 3711 BB_LIVE_VOP_ON_EXIT (block) = result; 3712 continue; 3713 } 3714 3715 pre_expr e = get_or_alloc_expr_for_name (result); 3716 add_to_value (get_expr_value_id (e), e); 3717 bitmap_value_insert_into_set (AVAIL_OUT (block), e); 3718 bitmap_insert_into_set (PHI_GEN (block), e); 3719 } 3720 3721 BB_MAY_NOTRETURN (block) = 0; 3722 3723 /* Now compute value numbers and populate value sets with all 3724 the expressions computed in BLOCK. */ 3725 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi); 3726 gsi_next (&gsi)) 3727 { 3728 ssa_op_iter iter; 3729 tree op; 3730 3731 stmt = gsi_stmt (gsi); 3732 3733 /* Cache whether the basic-block has any non-visible side-effect 3734 or control flow. 3735 If this isn't a call or it is the last stmt in the 3736 basic-block then the CFG represents things correctly. 
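	 E.g. (illustration): in

	   foo ();             <- mid-block call that may not return
	   x_1 = a_2 / b_3;    <- potentially trapping division

	 hoisting the division above the call could introduce a trap on a
	 path where foo never returns, so we record BB_MAY_NOTRETURN and
	 keep such trapping NARYs out of EXP_GEN below.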
 */
3737 	  if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3738 	    {
3739 	      /* Non-looping const functions always return normally.
3740 		 Otherwise the call might not return or have side-effects
3741 		 that forbid hoisting possibly trapping expressions
3742 		 before it.  */
3743 	      int flags = gimple_call_flags (stmt);
3744 	      if (!(flags & ECF_CONST)
3745 		  || (flags & ECF_LOOPING_CONST_OR_PURE))
3746 		BB_MAY_NOTRETURN (block) = 1;
3747 	    }
3748 
3749 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3750 	    {
3751 	      pre_expr e = get_or_alloc_expr_for_name (op);
3752 
3753 	      add_to_value (get_expr_value_id (e), e);
3754 	      bitmap_insert_into_set (TMP_GEN (block), e);
3755 	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3756 	    }
3757 
3758 	  if (gimple_vdef (stmt))
3759 	    BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3760 
3761 	  if (gimple_has_side_effects (stmt)
3762 	      || stmt_could_throw_p (stmt)
3763 	      || is_gimple_debug (stmt))
3764 	    continue;
3765 
3766 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3767 	    {
3768 	      if (ssa_undefined_value_p (op))
3769 		continue;
3770 	      pre_expr e = get_or_alloc_expr_for_name (op);
3771 	      bitmap_value_insert_into_set (EXP_GEN (block), e);
3772 	    }
3773 
3774 	  switch (gimple_code (stmt))
3775 	    {
3776 	    case GIMPLE_RETURN:
3777 	      continue;
3778 
3779 	    case GIMPLE_CALL:
3780 	      {
3781 		vn_reference_t ref;
3782 		vn_reference_s ref1;
3783 		pre_expr result = NULL;
3784 
3785 		/* We can value number only calls to real functions.  */
3786 		if (gimple_call_internal_p (stmt))
3787 		  continue;
3788 
3789 		vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3790 		if (!ref)
3791 		  continue;
3792 
3793 		/* If the value of the call is not invalidated in
3794 		   this block until it is computed, add the expression
3795 		   to EXP_GEN.  */
3796 		if (!gimple_vuse (stmt)
3797 		    || gimple_code
3798 			 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3799 		    || gimple_bb (SSA_NAME_DEF_STMT
3800 				    (gimple_vuse (stmt))) != block)
3801 		  {
3802 		    result = (pre_expr) pool_alloc (pre_expr_pool);
3803 		    result->kind = REFERENCE;
3804 		    result->id = 0;
3805 		    PRE_EXPR_REFERENCE (result) = ref;
3806 
3807 		    get_or_alloc_expression_id (result);
3808 		    add_to_value (get_expr_value_id (result), result);
3809 		    bitmap_value_insert_into_set (EXP_GEN (block), result);
3810 		  }
3811 		continue;
3812 	      }
3813 
3814 	    case GIMPLE_ASSIGN:
3815 	      {
3816 		pre_expr result = NULL;
3817 		switch (vn_get_stmt_kind (stmt))
3818 		  {
3819 		  case VN_NARY:
3820 		    {
3821 		      enum tree_code code = gimple_assign_rhs_code (stmt);
3822 		      vn_nary_op_t nary;
3823 
3824 		      /* COND_EXPR and VEC_COND_EXPR are awkward in
3825 			 that they contain an embedded complex expression.
3826 			 Don't even try to shove those through PRE.  */
3827 		      if (code == COND_EXPR
3828 			  || code == VEC_COND_EXPR)
3829 			continue;
3830 
3831 		      vn_nary_op_lookup_stmt (stmt, &nary);
3832 		      if (!nary)
3833 			continue;
3834 
3835 		      /* If the NARY traps and there was a preceding
3836 			 point in the block that might not return avoid
3837 			 adding the nary to EXP_GEN.  */
3838 		      if (BB_MAY_NOTRETURN (block)
3839 			  && vn_nary_may_trap (nary))
3840 			continue;
3841 
3842 		      result = (pre_expr) pool_alloc (pre_expr_pool);
3843 		      result->kind = NARY;
3844 		      result->id = 0;
3845 		      PRE_EXPR_NARY (result) = nary;
3846 		      break;
3847 		    }
3848 
3849 		  case VN_REFERENCE:
3850 		    {
3851 		      vn_reference_t ref;
3852 		      vn_reference_lookup (gimple_assign_rhs1 (stmt),
3853 					   gimple_vuse (stmt),
3854 					   VN_WALK, &ref, true);
3855 		      if (!ref)
3856 			continue;
3857 
3858 		      /* If the value of the reference is not invalidated in
3859 			 this block until it is computed, add the expression
3860 			 to EXP_GEN.
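			 E.g. (illustration): in

			   a.x = 1;       <- clobbers the reference
			   tmp_2 = a.x;

			 the loaded value is not the value the reference
			 would have at the top of the block, so it must
			 not be added to EXP_GEN.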
                         */
                      if (gimple_vuse (stmt))
                        {
                          gimple def_stmt;
                          bool ok = true;
                          def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
                          while (!gimple_nop_p (def_stmt)
                                 && gimple_code (def_stmt) != GIMPLE_PHI
                                 && gimple_bb (def_stmt) == block)
                            {
                              if (stmt_may_clobber_ref_p
                                    (def_stmt, gimple_assign_rhs1 (stmt)))
                                {
                                  ok = false;
                                  break;
                                }
                              def_stmt
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
                            continue;
                        }

                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }
            default:
              break;
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_bitmap_set (dump_file, EXP_GEN (block),
                            "exp_gen", block->index);
          print_bitmap_set (dump_file, PHI_GEN (block),
                            "phi_gen", block->index);
          print_bitmap_set (dump_file, TMP_GEN (block),
                            "tmp_gen", block->index);
          print_bitmap_set (dump_file, AVAIL_OUT (block),
                            "avail_out", block->index);
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}


/* Local state for the eliminate domwalk.  */
static vec<gimple> el_to_remove;
static vec<gimple> el_to_fixup;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

static tree
eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (el_avail.length () > SSA_NAME_VERSION (valnum))
        return el_avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

static void
eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (el_avail.length () <= SSA_NAME_VERSION (valnum))
        el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (el_avail[SSA_NAME_VERSION (valnum)])
        pushop = el_avail[SSA_NAME_VERSION (valnum)];
      el_avail_stack.safe_push (pushop);
      el_avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
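
/* A short illustration of the avail machinery above (hypothetical SSA
   names, not taken from a real dump): suppose VN_INFO (x_3)->valnum is
   x_1 and x_1 was made available via eliminate_push_avail while visiting
   a dominating block.  Then eliminate_avail (x_3) returns the leader
   x_1, so uses of x_3 can be rewritten to x_1 during the walk;
   after_dom_children pops the entry again when the walk leaves the
   dominated region.  */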
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

static tree
eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  tree expr = vn_get_expr_for (val);
  if (!CONVERT_EXPR_P (expr)
      && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
    return NULL_TREE;

  tree op = TREE_OPERAND (expr, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
  gassign *tem = gimple_build_assign (res,
                                      fold_build1 (TREE_CODE (expr),
                                                   TREE_TYPE (expr), leader));
  gsi_insert_before (gsi, tem, GSI_SAME_STMT);
  VN_INFO_GET (res)->valnum = val;

  if (TREE_CODE (leader) == SSA_NAME)
    gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, tem, 0, 0);
    }

  return res;
}

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction direction, bool do_pre_)
    : dom_walker (direction), do_pre (do_pre_) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  bool do_pre;
};

/* Perform elimination for the basic-block B during the domwalk.  */

void
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  el_avail_stack.safe_push (NULL_TREE);

  /* ??? If we do nothing for unreachable blocks then this will confuse
     tailmerging.  Eventually we can reduce its reliance on SCCVN now
     that we fully copy/constant-propagate (most) things.  */

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
        {
          gsi_next (&gsi);
          continue;
        }

      tree sprime = eliminate_avail (res);
      if (sprime
          && sprime != res)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced redundant PHI node defining ");
              print_generic_expr (dump_file, res, 0);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime, 0);
              fprintf (dump_file, "\n");
            }

          /* If we inserted this PHI node ourselves, it's not an
             elimination.  */
          if (inserted_exprs
              && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
            pre_stats.phis--;
          else
            pre_stats.eliminations++;

          /* If we will propagate into all uses don't bother to do
             anything.  */
          if (may_propagate_copy (res, sprime))
            {
              /* Mark the PHI for removal.  */
              el_to_remove.safe_push (phi);
              gsi_next (&gsi);
              continue;
            }

          remove_phi_node (&gsi, false);

          if (inserted_exprs
              && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
              && TREE_CODE (sprime) == SSA_NAME)
            gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);

          if (!useless_type_conversion_p (TREE_TYPE (res),
                                          TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (res), sprime);
          gimple stmt = gimple_build_assign (res, sprime);
          /* ??? It cannot yet be necessary (DOM walk).  */
          gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));

          gimple_stmt_iterator gsi2 = gsi_after_labels (b);
          gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
          continue;
        }

      eliminate_push_avail (res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree sprime = NULL_TREE;
      gimple stmt = gsi_stmt (gsi);
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME
          && !gimple_has_volatile_ops (stmt)
          /* See PR43491.  Do not replace a global register variable when
             it is the RHS of an assignment.  Do replace local register
             variables since gcc does not guarantee a local variable will
             be allocated in a register.
             ??? The fix isn't effective here.  This should instead
             be ensured by not value-numbering them the same but treating
             them like volatiles?  */
          && !(gimple_assign_single_p (stmt)
               && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
                   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
                   && is_global_var (gimple_assign_rhs1 (stmt)))))
        {
          sprime = eliminate_avail (lhs);
          if (!sprime)
            {
              /* If there is no existing usable leader but SCCVN thinks
                 it has an expression it wants to use as replacement,
                 insert that.  */
              tree val = VN_INFO (lhs)->valnum;
              if (val != VN_TOP
                  && TREE_CODE (val) == SSA_NAME
                  && VN_INFO (val)->needs_insertion
                  && VN_INFO (val)->expr != NULL_TREE
                  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
                eliminate_push_avail (sprime);
            }

          /* If this now constitutes a copy duplicate points-to
             and range info appropriately.  This is especially
             important for inserted code.  See tree-ssa-copy.c
             for similar code.  */
          if (sprime
              && TREE_CODE (sprime) == SSA_NAME)
            {
              basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
              if (POINTER_TYPE_P (TREE_TYPE (lhs))
                  && SSA_NAME_PTR_INFO (lhs)
                  && !SSA_NAME_PTR_INFO (sprime))
                {
                  duplicate_ssa_name_ptr_info (sprime,
                                               SSA_NAME_PTR_INFO (lhs));
                  if (b != sprime_b)
                    mark_ptr_info_alignment_unknown
                      (SSA_NAME_PTR_INFO (sprime));
                }
              else if (!POINTER_TYPE_P (TREE_TYPE (lhs))
                       && SSA_NAME_RANGE_INFO (lhs)
                       && !SSA_NAME_RANGE_INFO (sprime)
                       && b == sprime_b)
                duplicate_ssa_name_range_info (sprime,
                                               SSA_NAME_RANGE_TYPE (lhs),
                                               SSA_NAME_RANGE_INFO (lhs));
            }

          /* Inhibit the use of an inserted PHI on a loop header when
             the address of the memory reference is a simple induction
             variable.  In other cases the vectorizer won't do anything
             anyway (either it's loop invariant or a complicated
             expression).
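             As a sketch of the situation this avoids (hypothetical
             names): for a load a_1 = *p_2 in a loop body where p_2 is
             a simple induction variable, replacing a_1 with a PHI that
             PRE inserted on the loop header would carry the previous
             iteration's value around the back edge, adding a
             loop-carried dependence that blocks vectorization.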
          */
          if (sprime
              && TREE_CODE (sprime) == SSA_NAME
              && do_pre
              && flag_tree_loop_vectorize
              && loop_outer (b->loop_father)
              && has_zero_uses (sprime)
              && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
              && gimple_assign_load_p (stmt))
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (sprime);
              basic_block def_bb = gimple_bb (def_stmt);
              if (gimple_code (def_stmt) == GIMPLE_PHI
                  && b->loop_father->header == def_bb)
                {
                  ssa_op_iter iter;
                  tree op;
                  bool found = false;
                  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                    {
                      affine_iv iv;
                      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
                      if (def_bb
                          && flow_bb_inside_loop_p (b->loop_father, def_bb)
                          && simple_iv (b->loop_father,
                                        b->loop_father, op, &iv, true))
                        {
                          found = true;
                          break;
                        }
                    }
                  if (found)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fprintf (dump_file, "Not replacing ");
                          print_gimple_expr (dump_file, stmt, 0, 0);
                          fprintf (dump_file, " with ");
                          print_generic_expr (dump_file, sprime, 0);
                          fprintf (dump_file, " which would add a loop"
                                   " carried dependence to loop %d\n",
                                   b->loop_father->num);
                        }
                      /* Don't keep sprime available.  */
                      sprime = NULL_TREE;
                    }
                }
            }

          if (sprime)
            {
              /* If we can propagate the value computed for LHS into
                 all uses don't bother doing anything with this stmt.  */
              if (may_propagate_copy (lhs, sprime))
                {
                  /* Mark it for removal.  */
                  el_to_remove.safe_push (stmt);

                  /* ??? Don't count copy/constant propagations.  */
                  if (gimple_assign_single_p (stmt)
                      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                          || gimple_assign_rhs1 (stmt) == sprime))
                    continue;

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in all uses of ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  pre_stats.eliminations++;
                  continue;
                }

              /* If this is an assignment from our leader (which
                 happens when the value-number is a constant)
                 then there is nothing to do.  */
              if (gimple_assign_single_p (stmt)
                  && sprime == gimple_assign_rhs1 (stmt))
                continue;

              /* Else replace its RHS.
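                 For instance (hypothetical names), an assignment
                 x_1 = a_2 + b_3 whose value already has the available
                 leader t_4 becomes x_1 = t_4 here; the recomputed
                 addition is the redundancy being removed.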
                 */
              bool can_make_abnormal_goto
                = is_gimple_call (stmt)
                  && stmt_can_make_abnormal_goto (stmt);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replaced ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " with ");
                  print_generic_expr (dump_file, sprime, 0);
                  fprintf (dump_file, " in ");
                  print_gimple_stmt (dump_file, stmt, 0, 0);
                }

              if (TREE_CODE (sprime) == SSA_NAME)
                gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
                                NECESSARY, true);

              pre_stats.eliminations++;
              gimple orig_stmt = stmt;
              if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                              TREE_TYPE (sprime)))
                sprime = fold_convert (TREE_TYPE (lhs), sprime);
              tree vdef = gimple_vdef (stmt);
              tree vuse = gimple_vuse (stmt);
              propagate_tree_value_into_stmt (&gsi, sprime);
              stmt = gsi_stmt (gsi);
              update_stmt (stmt);
              if (vdef != gimple_vdef (stmt))
                VN_INFO (vdef)->valnum = vuse;

              /* If we removed EH side-effects from the statement, clean
                 its EH information.  */
              if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
                {
                  bitmap_set_bit (need_eh_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed EH side-effects.\n");
                }

              /* Likewise for AB side-effects.  */
              if (can_make_abnormal_goto
                  && !stmt_can_make_abnormal_goto (stmt))
                {
                  bitmap_set_bit (need_ab_cleanup,
                                  gimple_bb (stmt)->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Removed AB side-effects.\n");
                }

              continue;
            }
        }

      /* If the statement is a scalar store, see if the expression
         has the same value number as its rhs.  If so, the store is
         dead.  */
      if (gimple_assign_single_p (stmt)
          && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg (gimple_assign_lhs (stmt))
          && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
        {
          tree val;
          tree rhs = gimple_assign_rhs1 (stmt);
          val = vn_reference_lookup (gimple_assign_lhs (stmt),
                                     gimple_vuse (stmt), VN_WALK,
                                     NULL, false);
          if (TREE_CODE (rhs) == SSA_NAME)
            rhs = VN_INFO (rhs)->valnum;
          if (val
              && operand_equal_p (val, rhs, 0))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Deleted redundant store ");
                  print_gimple_stmt (dump_file, stmt, 0, 0);
                }

              /* Queue stmt for removal.  */
              el_to_remove.safe_push (stmt);
              continue;
            }
        }

      bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
      bool was_noreturn = (is_gimple_call (stmt)
                           && gimple_call_noreturn_p (stmt));
      tree vdef = gimple_vdef (stmt);
      tree vuse = gimple_vuse (stmt);

      /* If we didn't replace the whole stmt (or propagate the result
         into all uses), replace all uses on this stmt with their
         leaders.  */
      use_operand_p use_p;
      ssa_op_iter iter;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          /* ??? The call code above leaves stmt operands un-updated.  */
          if (TREE_CODE (use) != SSA_NAME)
            continue;
          tree sprime = eliminate_avail (use);
          if (sprime && sprime != use
              && may_propagate_copy (use, sprime)
              /* We substitute into debug stmts to avoid excessive
                 debug temporaries created by removed stmts, but we need
                 to avoid doing so for inserted sprimes as we never want
                 to create debug temporaries for them.  */
              && (!inserted_exprs
                  || TREE_CODE (sprime) != SSA_NAME
                  || !is_gimple_debug (stmt)
                  || !bitmap_bit_p (inserted_exprs,
                                    SSA_NAME_VERSION (sprime))))
            {
              propagate_value (use_p, sprime);
              gimple_set_modified (stmt, true);
              if (TREE_CODE (sprime) == SSA_NAME
                  && !is_gimple_debug (stmt))
                gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
                                NECESSARY, true);
            }
        }

      /* Visit indirect calls and turn them into direct calls if
         possible using the devirtualization machinery.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
        {
          tree fn = gimple_call_fn (call_stmt);
          if (fn
              && flag_devirtualize
              && virtual_method_call_p (fn))
            {
              tree otr_type = obj_type_ref_class (fn);
              tree instance;
              ipa_polymorphic_call_context context (current_function_decl,
                                                    fn, stmt, &instance);
              bool final;

              context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                        otr_type, stmt);

              vec <cgraph_node *> targets
                = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                     tree_to_uhwi
                                                       (OBJ_TYPE_REF_TOKEN (fn)),
                                                     context,
                                                     &final);
              if (dump_file)
                dump_possible_polymorphic_call_targets (dump_file,
                                                        obj_type_ref_class (fn),
                                                        tree_to_uhwi
                                                          (OBJ_TYPE_REF_TOKEN (fn)),
                                                        context);
              if (final && targets.length () <= 1 && dbg_cnt (devirt))
                {
                  tree fn;
                  if (targets.length () == 1)
                    fn = targets[0]->decl;
                  else
                    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  if (dump_enabled_p ())
                    {
                      location_t loc = gimple_location_safe (stmt);
                      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                       "converting indirect call to "
                                       "function %s\n",
                                       cgraph_node::get (fn)->name ());
                    }
                  gimple_call_set_fndecl (call_stmt, fn);
                  maybe_remove_unused_call_args (cfun, call_stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }

      if (gimple_modified_p (stmt))
        {
          /* If a formerly non-invariant ADDR_EXPR is turned into an
             invariant one it was on a separate stmt.  */
          if (gimple_assign_single_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
          gimple old_stmt = stmt;
          if (is_gimple_call (stmt))
            {
              /* ??? Only fold calls in place for now; this may create new
                 SSA names which in turn will confuse the free_scc_vn SSA
                 name release code.  */
              fold_stmt_inplace (&gsi);
              /* When changing a call into a noreturn call, cfg cleanup
                 is needed to fix up the noreturn call.
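                 (A noreturn call has to end its basic block, so we only
                 queue the stmt in el_to_fixup here; fixup_noreturn_call
                 deals with it after the domwalk, when splitting blocks
                 is possible again.)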
                 */
              if (!was_noreturn && gimple_call_noreturn_p (stmt))
                el_to_fixup.safe_push (stmt);
            }
          else
            {
              fold_stmt (&gsi);
              stmt = gsi_stmt (gsi);
              if ((gimple_code (stmt) == GIMPLE_COND
                   && (gimple_cond_true_p (as_a <gcond *> (stmt))
                       || gimple_cond_false_p (as_a <gcond *> (stmt))))
                  || (gimple_code (stmt) == GIMPLE_SWITCH
                      && TREE_CODE (gimple_switch_index (
                                      as_a <gswitch *> (stmt)))
                         == INTEGER_CST))
                el_todo |= TODO_cleanup_cfg;
            }
          /* If we removed EH side-effects from the statement, clean
             its EH information.  */
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
            {
              bitmap_set_bit (need_eh_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed EH side-effects.\n");
            }
          /* Likewise for AB side-effects.  */
          if (can_make_abnormal_goto
              && !stmt_can_make_abnormal_goto (stmt))
            {
              bitmap_set_bit (need_ab_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed AB side-effects.\n");
            }
          update_stmt (stmt);
          if (vdef != gimple_vdef (stmt))
            VN_INFO (vdef)->valnum = vuse;
        }

      /* Make new values available - for fully redundant LHS we
         continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
        eliminate_push_avail (DEF_FROM_PTR (defp));
    }

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    {
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (arg);
          if (sprime && may_propagate_copy (arg, sprime))
            {
              propagate_value (use_p, sprime);
              if (TREE_CODE (sprime) == SSA_NAME)
                gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
            }
        }
    }
}

/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = el_avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = el_avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        el_avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}

/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (bool do_pre)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_fixup.create (0);
  el_todo = 0;
  el_avail.create (num_ssa_names);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS,
                        do_pre).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el_to_remove.is_empty ())
    {
      stmt = el_to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      tree lhs;
      if (gimple_code (stmt) == GIMPLE_PHI)
        lhs = gimple_phi_result (stmt);
      else
        lhs = gimple_get_lhs (stmt);

      if (inserted_exprs
          && TREE_CODE (lhs) == SSA_NAME)
        bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));

      gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }
  el_to_remove.release ();

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el_to_fixup.is_empty ())
    {
      stmt = el_to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      if (fixup_noreturn_call (stmt))
        el_todo |= TODO_cleanup_cfg;
    }
  el_to_fixup.release ();

  return el_todo;
}

/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}

/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}

/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.
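   For example (a hypothetical scenario), an expression inserted into a
   predecessor block can end up with all of its uses removed by later
   elimination.  The worklist below propagates the NECESSARY flag from
   used insertions to the statements feeding them, and the final loop
   deletes every insertion that was never marked.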
   */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }
  BITMAP_FREE (worklist);
}


/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  /* For ANTIC computation we need a postorder that also guarantees that
     a block with a single successor is visited after its successor.
     RPO on the inverted CFG has this property.
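     In other words, if block A's only successor is B, then B appears
     before A in the order, so the backward ANTIC problem already sees
     B's solution when it processes A on the first sweep.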
     */
  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}


/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  delete phi_translate_table;
  phi_translate_table = NULL;
  delete expression_to_id;
  expression_to_id = NULL;
  name_to_id.release ();

  free_aux_for_blocks ();
}

namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PRE, /* tv_id */
  /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
     pass_pre.  */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  PROP_no_crit_edges, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_pre

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Eliminate folds statements, which might (though it should not) end
     up not keeping virtual operands up to date.  */
  gcc_assert (!need_ssa_update_p (fun));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (true);

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (fun, "New PHIs", pre_stats.phis);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}

namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate (false);

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (fun, "Insertions", pre_stats.insertions);
  statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}