/* tree-ssa-threadedge.c -- revision 169689 (svn-annotate residue stripped).  */
/* SSA Jump Threading
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Jeff Law  <law@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* To avoid code explosion due to jump threading, we limit the
   number of statements we are going to copy.
This variable 49169689Skan holds the number of statements currently seen that we'll have 50169689Skan to copy as part of the jump threading process. */ 51169689Skanstatic int stmt_count; 52169689Skan 53169689Skan/* Return TRUE if we may be able to thread an incoming edge into 54169689Skan BB to an outgoing edge from BB. Return FALSE otherwise. */ 55169689Skan 56169689Skanbool 57169689Skanpotentially_threadable_block (basic_block bb) 58169689Skan{ 59169689Skan block_stmt_iterator bsi; 60169689Skan 61169689Skan /* If BB has a single successor or a single predecessor, then 62169689Skan there is no threading opportunity. */ 63169689Skan if (single_succ_p (bb) || single_pred_p (bb)) 64169689Skan return false; 65169689Skan 66169689Skan /* If BB does not end with a conditional, switch or computed goto, 67169689Skan then there is no threading opportunity. */ 68169689Skan bsi = bsi_last (bb); 69169689Skan if (bsi_end_p (bsi) 70169689Skan || ! bsi_stmt (bsi) 71169689Skan || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR 72169689Skan && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR 73169689Skan && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR)) 74169689Skan return false; 75169689Skan 76169689Skan return true; 77169689Skan} 78169689Skan 79169689Skan/* Return the LHS of any ASSERT_EXPR where OP appears as the first 80169689Skan argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates 81169689Skan BB. If no such ASSERT_EXPR is found, return OP. 
   */

static tree
lhs_of_dominating_assert (tree op, basic_block bb, tree stmt)
{
  imm_use_iterator imm_iter;
  tree use_stmt;
  use_operand_p use_p;

  /* Scan the immediate uses of OP for an assignment of the form
     LHS = ASSERT_EXPR <OP, ...> whose block dominates BB; STMT itself
     is excluded so we never pick up the statement we are analyzing.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
	  && TREE_CODE (use_stmt) == MODIFY_EXPR
	  && TREE_CODE (TREE_OPERAND (use_stmt, 1)) == ASSERT_EXPR
	  && TREE_OPERAND (TREE_OPERAND (use_stmt, 1), 0) == op
	  && dominated_by_p (CDI_DOMINATORS, bb, bb_for_stmt (use_stmt)))
	{
	  /* Return the LHS of the dominating ASSERT_EXPR assignment;
	     note this returns the first qualifying use found.  */
	  return TREE_OPERAND (use_stmt, 0);
	}
    }
  return op;
}


/* We record temporary equivalences created by PHI nodes or
   statements within the target block.  Doing so allows us to
   identify more jump threading opportunities, even in blocks
   with side effects.

   We keep track of those temporary equivalences in a stack
   structure so that we can unwind them when we're done processing
   a particular edge.  This routine handles unwinding the data
   structures.  */

static void
remove_temporary_equivalences (VEC(tree, heap) **stack)
{
  while (VEC_length (tree, *stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, *stack);

      /* A NULL value indicates we should stop unwinding, otherwise
	 pop off the next entry as they're recorded in pairs.
	 */
      if (dest == NULL)
	break;

      /* Entries are pushed as (previous value, name) pairs; restore
	 the saved SSA_NAME_VALUE for DEST.  */
      prev_value = VEC_pop (tree, *stack);
      SSA_NAME_VALUE (dest) = prev_value;
    }
}

/* Record a temporary equivalence, saving enough information so that
   we can restore the state of recorded equivalences when we're
   done processing the current edge.  */

static void
record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
{
  tree prev_x = SSA_NAME_VALUE (x);

  /* If Y already has a recorded value, record that value instead,
     collapsing one level of the equivalence chain.  */
  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  /* Push X's previous value followed by X itself, matching the pair
     layout consumed by remove_temporary_equivalences.  */
  SSA_NAME_VALUE (x) = y;
  VEC_reserve (tree, heap, *stack, 2);
  VEC_quick_push (tree, *stack, prev_x);
  VEC_quick_push (tree, *stack, x);
}

/* Record temporary equivalences created by PHIs at the target of the
   edge E.  Record unwind information for the equivalences onto STACK.

   If a PHI which prevents threading is encountered, then return FALSE
   indicating we should not thread this edge, else return TRUE.  */

static bool
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
{
  tree phi;

  /* Each PHI creates a temporary equivalence, record them.
     These are context sensitive equivalences and will be removed
     later.
     */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);

      /* If the desired argument is not the same as this PHI's result
	 and it is set by a PHI in E->dest, then we can not thread
	 through E->dest.  */
      if (src != dst
	  && TREE_CODE (src) == SSA_NAME
	  && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
	  && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
	return false;

      /* We consider any non-virtual PHI as a statement since it
	 could result in a constant assignment or copy operation.  */
      if (is_gimple_reg (dst))
	stmt_count++;

      record_temporary_equivalence (dst, src, stack);
    }
  return true;
}

/* Try to simplify each statement in E->dest, ultimately leading to
   a simplification of the COND_EXPR at the end of E->dest.

   Record unwind information for temporary equivalences onto STACK.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   statements using pass specific information.

   We might consider marking just those statements which ultimately
   feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
   would be recovered by trying to simplify fewer statements.

   If we are able to simplify a statement into the form
   SSA_NAME = (SSA_NAME | gimple invariant), then we can record
   a context sensitive equivalency which may help us simplify
   later statements in E->dest.
   */

static tree
record_temporary_equivalences_from_stmts_at_dest (edge e,
						  VEC(tree, heap) **stack,
						  tree (*simplify) (tree,
								    tree))
{
  block_stmt_iterator bsi;
  tree stmt = NULL;
  int max_stmt_count;

  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);

  /* Walk through each statement in the block recording equivalences
     we discover.  Note any equivalences we discover are context
     sensitive (ie, are dependent on traversing E) and must be unwound
     when we're finished processing E.  */
  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree cached_lhs = NULL;

      stmt = bsi_stmt (bsi);

      /* Ignore empty statements and labels.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* If the statement has volatile operands, then we assume we
	 can not thread through this block.  This is overly
	 conservative in some ways.  */
      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
	return NULL;

      /* If duplicating this block is going to cause too much code
	 expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
	return NULL;

      /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
	 value, then do not try to simplify this statement as it will
	 not simplify in any way that is helpful for jump threading.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
	continue;

      /* At this point we have a statement which assigns an RHS to an
	 SSA_VAR on the LHS.  We want to try and simplify this statement
	 to expose more context sensitive equivalences which in turn may
	 allow us to simplify the condition at the end of the loop.

	 Handle simple copy operations as well as implied copies from
	 ASSERT_EXPRs.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
	cached_lhs = TREE_OPERAND (stmt, 1);
      else if (TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
	cached_lhs = TREE_OPERAND (TREE_OPERAND (stmt, 1), 0);
      else
	{
	  /* A statement that is not a trivial copy or ASSERT_EXPR.
	     We're going to temporarily copy propagate the operands
	     and see if that allows us to simplify this statement.  */
	  tree *copy, pre_fold_expr;
	  ssa_op_iter iter;
	  use_operand_p use_p;
	  unsigned int num, i = 0;

	  num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
	  copy = XCNEWVEC (tree, num);

	  /* Make a copy of the uses & vuses into USES_COPY, then cprop into
	     the operands.  */
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    {
	      tree tmp = NULL;
	      tree use = USE_FROM_PTR (use_p);

	      copy[i++] = use;
	      if (TREE_CODE (use) == SSA_NAME)
		tmp = SSA_NAME_VALUE (use);
	      /* Only substitute real values, never VALUE_HANDLEs.  */
	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
		SET_USE (use_p, tmp);
	    }

	  /* Try to fold/lookup the new expression.  Inserting the
	     expression into the hash table is unlikely to help.
	     Sadly, we have to handle conditional assignments specially
	     here, because fold expects all the operands of an expression
	     to be folded before the expression itself is folded, but we
	     can't just substitute the folded condition here.  */
	  if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
	    {
	      tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
	      cond = fold (cond);
	      if (cond == boolean_true_node)
		pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
	      else if (cond == boolean_false_node)
		pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
	      else
		pre_fold_expr = TREE_OPERAND (stmt, 1);
	    }
	  else
	    pre_fold_expr = TREE_OPERAND (stmt, 1);

	  if (pre_fold_expr)
	    {
	      cached_lhs = fold (pre_fold_expr);
	      /* Fall back to the pass-specific callback; note it is
		 invoked with STMT for both arguments here.  */
	      if (TREE_CODE (cached_lhs) != SSA_NAME
		  && !is_gimple_min_invariant (cached_lhs))
		cached_lhs = (*simplify) (stmt, stmt);
	    }

	  /* Restore the statement's original uses/defs.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    SET_USE (use_p, copy[i++]);

	  free (copy);
	}

      /* Record the context sensitive equivalence if we were able
	 to simplify this statement.
	 */
      if (cached_lhs
	  && (TREE_CODE (cached_lhs) == SSA_NAME
	      || is_gimple_min_invariant (cached_lhs)))
	record_temporary_equivalence (TREE_OPERAND (stmt, 0),
				      cached_lhs,
				      stack);
    }
  /* Returns the last statement examined, or NULL on early exit.  */
  return stmt;
}

/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch COND_EXPR
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.  */

static tree
simplify_control_stmt_condition (edge e,
				 tree stmt,
				 tree dummy_cond,
				 tree (*simplify) (tree, tree),
				 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;

  /* Extract the controlling expression from the COND_EXPR, GOTO_EXPR
     or SWITCH_EXPR terminating the block.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    cond = COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == GOTO_EXPR)
    cond = GOTO_DESTINATION (stmt);
  else
    cond = SWITCH_COND (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (COMPARISON_CLASS_P (cond))
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = TREE_OPERAND (cond, 0);
      op1 = TREE_OPERAND (cond, 1);
      cond_code = TREE_CODE (cond);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op0);
	  if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
	    op0 = tmp;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op1);
	  if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
	    op1 = tmp;
	}

      if (handle_dominating_asserts)
	{
	  /* Now see if the operand was consumed by an ASSERT_EXPR
	     which dominates E->src.  If so, we want to replace the
	     operand with the LHS of the ASSERT_EXPR.  */
	  if (TREE_CODE (op0) == SSA_NAME)
	    op0 = lhs_of_dominating_assert (op0, e->src, stmt);

	  if (TREE_CODE (op1) == SSA_NAME)
	    op1 = lhs_of_dominating_assert (op1, e->src, stmt);
	}

      /* We may need to canonicalize the comparison.  For
	 example, op0 might be a constant while op1 is an
	 SSA_NAME.  Failure to canonicalize will cause us to
	 miss threading opportunities.  */
      if (cond_code != SSA_NAME
	  && tree_swap_operands_p (op0, op1, false))
	{
	  tree tmp;
	  cond_code = swap_tree_comparison (TREE_CODE (cond));
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      /* Stuff the operator and operands into our dummy conditional
	 expression.  */
      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;

      /* We absolutely do not care about any type conversions
	 we only care about a zero/nonzero value.  */
      fold_defer_overflow_warnings ();

      /* Fold the condition, then strip useless conversions so any
	 invariant result is exposed.  */
      cached_lhs = fold (COND_EXPR_COND (dummy_cond));
      while (TREE_CODE (cached_lhs) == NOP_EXPR
	     || TREE_CODE (cached_lhs) == CONVERT_EXPR
	     || TREE_CODE (cached_lhs) == NON_LVALUE_EXPR)
	cached_lhs = TREE_OPERAND (cached_lhs, 0);

      fold_undefer_overflow_warnings (is_gimple_min_invariant (cached_lhs),
				      stmt,
				      WARN_STRICT_OVERFLOW_CONDITIONAL);

      /* If we have not simplified the condition down to an invariant,
	 then use the pass specific callback to simplify the condition.  */
      if (! is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (dummy_cond, stmt);
    }

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalency chains.

	 It is possible to get loops in the SSA_NAME_VALUE chains
	 (consider threading the backedge of a loop where we have
	 a loop invariant SSA_NAME used in the condition.  */
      if (cached_lhs
	  && TREE_CODE (cached_lhs) == SSA_NAME
	  && SSA_NAME_VALUE (cached_lhs))
	cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
	 update CACHED_LHS appropriately.
	 */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
	cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
	 pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (stmt, stmt);
    }
  else
    /* Neither a comparison nor a bare SSA_NAME: nothing we can do.  */
    cached_lhs = NULL;

  return cached_lhs;
}

/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   may have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.  */

void
thread_across_edge (tree dummy_cond,
		    edge e,
		    bool handle_dominating_asserts,
		    VEC(tree, heap) **stack,
		    tree (*simplify) (tree, tree))
{
  tree stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      ssa_op_iter iter;
      use_operand_p use_p;
      tree last = bsi_stmt (bsi_last (e->dest));

      FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
	{
	  tree use = USE_FROM_PTR (use_p);

	  /* A non-PHI definition of USE inside E->dest means the
	     condition depends on statements we would be bypassing.  */
	  if (TREE_CODE (use) == SSA_NAME
	      && TREE_CODE (SSA_NAME_DEF_STMT (use)) != PHI_NODE
	      && bb_for_stmt (SSA_NAME_DEF_STMT (use)) == e->dest)
	    goto fail;
	}
    }

  /* Reset the shared statement counter; it is bounded against
     PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS during recording.  */
  stmt_count = 0;

  /* PHIs create temporary equivalences.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
     will be taken.  */
  if (TREE_CODE (stmt) == COND_EXPR
      || TREE_CODE (stmt) == GOTO_EXPR
      || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
					      handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
	{
	  edge taken_edge = find_taken_edge (e->dest, cond);
	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);

	  /* Threading to the block we came from would be a no-op.  */
	  if (dest == e->dest)
	    goto fail;

	  remove_temporary_equivalences (stack);
	  register_jump_thread (e, taken_edge);
	}
    }

  /* NOTE(review): on the successful path above, control falls through
     to this label, so remove_temporary_equivalences runs a second
     time.  That is only harmless if the first unwind left the stack
     empty or the caller's marker discipline makes the second unwind a
     no-op -- confirm against the callers in tree-ssa-dom.c and
     tree-vrp.c.  */
 fail:
  remove_temporary_equivalences (stack);
}