/* CFG cleanup for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"

/* Remove any fallthru edge from EV.  Return true if an edge was removed.
   Note that only the first EDGE_FALLTHRU edge found is removed; the
   iteration stops as soon as one is deleted.  */

static bool
remove_fallthru_edge (VEC(edge,gc) *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
	remove_edge (e);
	return true;
      }
  return false;
}

/* Disconnect an unreachable block in the control expression starting
   at block BB.
*/ 69 70static bool 71cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi) 72{ 73 edge taken_edge; 74 bool retval = false; 75 tree expr = bsi_stmt (bsi), val; 76 77 if (!single_succ_p (bb)) 78 { 79 edge e; 80 edge_iterator ei; 81 82 switch (TREE_CODE (expr)) 83 { 84 case COND_EXPR: 85 val = fold (COND_EXPR_COND (expr)); 86 break; 87 88 case SWITCH_EXPR: 89 val = fold (SWITCH_COND (expr)); 90 if (TREE_CODE (val) != INTEGER_CST) 91 return false; 92 break; 93 94 default: 95 gcc_unreachable (); 96 } 97 98 taken_edge = find_taken_edge (bb, val); 99 if (!taken_edge) 100 return false; 101 102 /* Remove all the edges except the one that is always executed. */ 103 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) 104 { 105 if (e != taken_edge) 106 { 107 taken_edge->probability += e->probability; 108 taken_edge->count += e->count; 109 remove_edge (e); 110 retval = true; 111 } 112 else 113 ei_next (&ei); 114 } 115 if (taken_edge->probability > REG_BR_PROB_BASE) 116 taken_edge->probability = REG_BR_PROB_BASE; 117 } 118 else 119 taken_edge = single_succ_edge (bb); 120 121 bsi_remove (&bsi); 122 taken_edge->flags = EDGE_FALLTHRU; 123 124 /* We removed some paths from the cfg. */ 125 free_dominance_info (CDI_DOMINATORS); 126 127 return retval; 128} 129 130/* A list of all the noreturn calls passed to modify_stmt. 131 cleanup_control_flow uses it to detect cases where a mid-block 132 indirect call has been turned into a noreturn call. When this 133 happens, all the instructions after the call are no longer 134 reachable and must be deleted as dead. */ 135 136VEC(tree,gc) *modified_noreturn_calls; 137 138/* Try to remove superfluous control structures. */ 139 140static bool 141cleanup_control_flow (void) 142{ 143 basic_block bb; 144 block_stmt_iterator bsi; 145 bool retval = false; 146 tree stmt; 147 148 /* Detect cases where a mid-block call is now known not to return. 
*/ 149 while (VEC_length (tree, modified_noreturn_calls)) 150 { 151 stmt = VEC_pop (tree, modified_noreturn_calls); 152 bb = bb_for_stmt (stmt); 153 if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt)) 154 split_block (bb, stmt); 155 } 156 157 FOR_EACH_BB (bb) 158 { 159 bsi = bsi_last (bb); 160 161 /* If the last statement of the block could throw and now cannot, 162 we need to prune cfg. */ 163 retval |= tree_purge_dead_eh_edges (bb); 164 165 if (bsi_end_p (bsi)) 166 continue; 167 168 stmt = bsi_stmt (bsi); 169 170 if (TREE_CODE (stmt) == COND_EXPR 171 || TREE_CODE (stmt) == SWITCH_EXPR) 172 retval |= cleanup_control_expr_graph (bb, bsi); 173 /* If we had a computed goto which has a compile-time determinable 174 destination, then we can eliminate the goto. */ 175 else if (TREE_CODE (stmt) == GOTO_EXPR 176 && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR 177 && (TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0)) 178 == LABEL_DECL)) 179 { 180 edge e; 181 tree label; 182 edge_iterator ei; 183 basic_block target_block; 184 bool removed_edge = false; 185 186 /* First look at all the outgoing edges. Delete any outgoing 187 edges which do not go to the right block. For the one 188 edge which goes to the right block, fix up its flags. */ 189 label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0); 190 target_block = label_to_block (label); 191 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) 192 { 193 if (e->dest != target_block) 194 { 195 removed_edge = true; 196 remove_edge (e); 197 } 198 else 199 { 200 /* Turn off the EDGE_ABNORMAL flag. */ 201 e->flags &= ~EDGE_ABNORMAL; 202 203 /* And set EDGE_FALLTHRU. */ 204 e->flags |= EDGE_FALLTHRU; 205 ei_next (&ei); 206 } 207 } 208 209 /* If we removed one or more edges, then we will need to fix the 210 dominators. It may be possible to incrementally update them. */ 211 if (removed_edge) 212 free_dominance_info (CDI_DOMINATORS); 213 214 /* Remove the GOTO_EXPR as it is not needed. 
The CFG has all the 215 relevant information we need. */ 216 bsi_remove (&bsi); 217 retval = true; 218 } 219 220 /* Check for indirect calls that have been turned into 221 noreturn calls. */ 222 else if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs)) 223 { 224 free_dominance_info (CDI_DOMINATORS); 225 retval = true; 226 } 227 } 228 return retval; 229} 230 231/* Return true if basic block BB does nothing except pass control 232 flow to another block and that we can safely insert a label at 233 the start of the successor block. 234 235 As a precondition, we require that BB be not equal to 236 ENTRY_BLOCK_PTR. */ 237 238static bool 239tree_forwarder_block_p (basic_block bb, bool phi_wanted) 240{ 241 block_stmt_iterator bsi; 242 edge_iterator ei; 243 edge e, succ; 244 basic_block dest; 245 246 /* BB must have a single outgoing edge. */ 247 if (single_succ_p (bb) != 1 248 /* If PHI_WANTED is false, BB must not have any PHI nodes. 249 Otherwise, BB must have PHI nodes. */ 250 || (phi_nodes (bb) != NULL_TREE) != phi_wanted 251 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */ 252 || single_succ (bb) == EXIT_BLOCK_PTR 253 /* Nor should this be an infinite loop. */ 254 || single_succ (bb) == bb 255 /* BB may not have an abnormal outgoing edge. */ 256 || (single_succ_edge (bb)->flags & EDGE_ABNORMAL)) 257 return false; 258 259#if ENABLE_CHECKING 260 gcc_assert (bb != ENTRY_BLOCK_PTR); 261#endif 262 263 /* Now walk through the statements backward. We can ignore labels, 264 anything else means this is not a forwarder block. 
*/ 265 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi)) 266 { 267 tree stmt = bsi_stmt (bsi); 268 269 switch (TREE_CODE (stmt)) 270 { 271 case LABEL_EXPR: 272 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))) 273 return false; 274 break; 275 276 default: 277 return false; 278 } 279 } 280 281 if (find_edge (ENTRY_BLOCK_PTR, bb)) 282 return false; 283 284 if (current_loops) 285 { 286 basic_block dest; 287 /* Protect loop latches, headers and preheaders. */ 288 if (bb->loop_father->header == bb) 289 return false; 290 dest = EDGE_SUCC (bb, 0)->dest; 291 292 if (dest->loop_father->header == dest) 293 return false; 294 } 295 296 /* If we have an EH edge leaving this block, make sure that the 297 destination of this block has only one predecessor. This ensures 298 that we don't get into the situation where we try to remove two 299 forwarders that go to the same basic block but are handlers for 300 different EH regions. */ 301 succ = single_succ_edge (bb); 302 dest = succ->dest; 303 FOR_EACH_EDGE (e, ei, bb->preds) 304 { 305 if (e->flags & EDGE_EH) 306 { 307 if (!single_pred_p (dest)) 308 return false; 309 } 310 } 311 312 return true; 313} 314 315/* Return true if BB has at least one abnormal incoming edge. */ 316 317static inline bool 318has_abnormal_incoming_edge_p (basic_block bb) 319{ 320 edge e; 321 edge_iterator ei; 322 323 FOR_EACH_EDGE (e, ei, bb->preds) 324 if (e->flags & EDGE_ABNORMAL) 325 return true; 326 327 return false; 328} 329 330/* If all the PHI nodes in DEST have alternatives for E1 and E2 and 331 those alternatives are equal in each of the PHI nodes, then return 332 true, else return false. 
 */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  tree phi;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    {
      tree val1 = PHI_ARG_DEF (phi, n1);
      tree val2 = PHI_ARG_DEF (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
	return false;
    }

  return true;
}

/* Removes forwarder block BB.  Returns false if this failed.  If a new
   forwarder block is created due to redirection of edges, it is
   stored to worklist.  */

static bool
remove_forwarder_block (basic_block bb, basic_block **worklist)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  tree label;
  tree phi;
  edge_iterator ei;
  block_stmt_iterator bsi, bsi_to;
  bool seen_abnormal_edge = false;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not merge
     it.  */
  label = first_stmt (dest);
  if (label
      && TREE_CODE (label) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (has_abnormal_incoming_edge_p (bb))
    {
      seen_abnormal_edge = true;

      if (has_abnormal_incoming_edge_p (dest)
	  || phi_nodes (dest) != NULL_TREE)
	return false;
    }

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (phi_nodes (dest))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  s = find_edge (e->src, dest);
	  if (!s)
	    continue;

	  if (!phi_alternatives_equal (dest, succ, s))
	    return false;
	}
    }

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* If there is an abnormal edge, redirect it anyway, and
	     move the labels to the new block to make it legal.  */
	  s = redirect_edge_succ_nodup (e, dest);
	}
      else
	s = redirect_edge_and_branch (e, dest);

      if (s == e)
	{
	  /* Create arguments for the phi nodes, since the edge was not
	     here before.  */
	  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
	    add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
	}
      else
	{
	  /* The source basic block might become a forwarder.  We know
	     that it was not a forwarder before, since it used to have
	     at least two outgoing edges, so we may just add it to
	     worklist.  */
	  if (tree_forwarder_block_p (s->src, false))
	    *(*worklist)++ = s->src;
	}
    }

  if (seen_abnormal_edge)
    {
      /* Move the labels to the new block, so that the redirection of
	 the abnormal edges works.  */

      bsi_to = bsi_start (dest);
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  label = bsi_stmt (bsi);
	  gcc_assert (TREE_CODE (label) == LABEL_EXPR);
	  bsi_remove (&bsi);
	  bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
	}
    }

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
	{
	  /* Shortcut to avoid calling (relatively expensive)
	     nearest_common_dominator unless necessary.  */
	  dom = dombb;
	}
      else
	dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}

/* Removes forwarder blocks.  Returns true if any forwarder block was
   removed.  */

static bool
cleanup_forwarder_blocks (void)
{
  basic_block bb;
  bool changed = false;
  basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
  basic_block *current = worklist;

  FOR_EACH_BB (bb)
    {
      if (tree_forwarder_block_p (bb, false))
	*current++ = bb;
    }

  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block (bb, &current);
    }

  free (worklist);
  return changed;
}

/* Do one round of CFG cleanup.  Return true if the flowgraph was
   modified.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval;

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();

  /* Forwarder blocks can carry line number information which is
     useful when debugging, so we only clean them up when
     optimizing.  */

  if (optimize > 0)
    {
      /* cleanup_forwarder_blocks can redirect edges out of
	 SWITCH_EXPRs, which can get expensive.  So we want to enable
	 recording of edge to CASE_LABEL_EXPR mappings around the call
	 to cleanup_forwarder_blocks.  */
      start_recording_case_labels ();
      retval |= cleanup_forwarder_blocks ();
      end_recording_case_labels ();
    }

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  retval |= merge_seq_blocks ();

  return retval;
}


/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if the flowgraph was modified, false otherwise.  */

bool
cleanup_tree_cfg (void)
{
  bool retval, changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.  */
  changed = false;
  do
    {
      retval = cleanup_tree_cfg_1 ();
      changed |= retval;
    }
  while (retval);

  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  return changed;
}

/* Cleanup cfg and repair loop structures.  */

void
cleanup_tree_cfg_loop (void)
{
  bool changed = cleanup_tree_cfg ();

  if (changed)
    {
      bitmap changed_bbs = BITMAP_ALLOC (NULL);
      fix_loop_structure (current_loops, changed_bbs);
      calculate_dominance_info (CDI_DOMINATORS);

      /* This usually does nothing.  But sometimes parts of cfg that originally
	 were inside a loop get out of it due to edge removal (since they
	 become unreachable by back edges from latch).  */
      rewrite_into_loop_closed_ssa (changed_bbs, TODO_update_ssa);

      BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
      verify_loop_structure (current_loops);
#endif
      scev_reset ();
    }
}

/* Merge the PHI nodes at BB into those at BB's sole successor.
*/ 610 611static void 612remove_forwarder_block_with_phi (basic_block bb) 613{ 614 edge succ = single_succ_edge (bb); 615 basic_block dest = succ->dest; 616 tree label; 617 basic_block dombb, domdest, dom; 618 619 /* We check for infinite loops already in tree_forwarder_block_p. 620 However it may happen that the infinite loop is created 621 afterwards due to removal of forwarders. */ 622 if (dest == bb) 623 return; 624 625 /* If the destination block consists of a nonlocal label, do not 626 merge it. */ 627 label = first_stmt (dest); 628 if (label 629 && TREE_CODE (label) == LABEL_EXPR 630 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label))) 631 return; 632 633 /* Redirect each incoming edge to BB to DEST. */ 634 while (EDGE_COUNT (bb->preds) > 0) 635 { 636 edge e = EDGE_PRED (bb, 0), s; 637 tree phi; 638 639 s = find_edge (e->src, dest); 640 if (s) 641 { 642 /* We already have an edge S from E->src to DEST. If S and 643 E->dest's sole successor edge have the same PHI arguments 644 at DEST, redirect S to DEST. */ 645 if (phi_alternatives_equal (dest, s, succ)) 646 { 647 e = redirect_edge_and_branch (e, dest); 648 PENDING_STMT (e) = NULL_TREE; 649 continue; 650 } 651 652 /* PHI arguments are different. Create a forwarder block by 653 splitting E so that we can merge PHI arguments on E to 654 DEST. */ 655 e = single_succ_edge (split_edge (e)); 656 } 657 658 s = redirect_edge_and_branch (e, dest); 659 660 /* redirect_edge_and_branch must not create a new edge. */ 661 gcc_assert (s == e); 662 663 /* Add to the PHI nodes at DEST each PHI argument removed at the 664 destination of E. */ 665 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi)) 666 { 667 tree def = PHI_ARG_DEF (phi, succ->dest_idx); 668 669 if (TREE_CODE (def) == SSA_NAME) 670 { 671 tree var; 672 673 /* If DEF is one of the results of PHI nodes removed during 674 redirection, replace it with the PHI argument that used 675 to be on E. 
*/ 676 for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var)) 677 { 678 tree old_arg = TREE_PURPOSE (var); 679 tree new_arg = TREE_VALUE (var); 680 681 if (def == old_arg) 682 { 683 def = new_arg; 684 break; 685 } 686 } 687 } 688 689 add_phi_arg (phi, def, s); 690 } 691 692 PENDING_STMT (e) = NULL; 693 } 694 695 /* Update the dominators. */ 696 dombb = get_immediate_dominator (CDI_DOMINATORS, bb); 697 domdest = get_immediate_dominator (CDI_DOMINATORS, dest); 698 if (domdest == bb) 699 { 700 /* Shortcut to avoid calling (relatively expensive) 701 nearest_common_dominator unless necessary. */ 702 dom = dombb; 703 } 704 else 705 dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb); 706 707 set_immediate_dominator (CDI_DOMINATORS, dest, dom); 708 709 /* Remove BB since all of BB's incoming edges have been redirected 710 to DEST. */ 711 delete_basic_block (bb); 712} 713 714/* This pass merges PHI nodes if one feeds into another. For example, 715 suppose we have the following: 716 717 goto <bb 9> (<L9>); 718 719<L8>:; 720 tem_17 = foo (); 721 722 # tem_6 = PHI <tem_17(8), tem_23(7)>; 723<L9>:; 724 725 # tem_3 = PHI <tem_6(9), tem_2(5)>; 726<L10>:; 727 728 Then we merge the first PHI node into the second one like so: 729 730 goto <bb 9> (<L10>); 731 732<L8>:; 733 tem_17 = foo (); 734 735 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>; 736<L10>:; 737*/ 738 739static void 740merge_phi_nodes (void) 741{ 742 basic_block *worklist = xmalloc (sizeof (basic_block) * n_basic_blocks); 743 basic_block *current = worklist; 744 basic_block bb; 745 746 calculate_dominance_info (CDI_DOMINATORS); 747 748 /* Find all PHI nodes that we may be able to merge. */ 749 FOR_EACH_BB (bb) 750 { 751 basic_block dest; 752 753 /* Look for a forwarder block with PHI nodes. */ 754 if (!tree_forwarder_block_p (bb, true)) 755 continue; 756 757 dest = single_succ (bb); 758 759 /* We have to feed into another basic block with PHI 760 nodes. 
*/ 761 if (!phi_nodes (dest) 762 /* We don't want to deal with a basic block with 763 abnormal edges. */ 764 || has_abnormal_incoming_edge_p (bb)) 765 continue; 766 767 if (!dominated_by_p (CDI_DOMINATORS, dest, bb)) 768 { 769 /* If BB does not dominate DEST, then the PHI nodes at 770 DEST must be the only users of the results of the PHI 771 nodes at BB. */ 772 *current++ = bb; 773 } 774 } 775 776 /* Now let's drain WORKLIST. */ 777 while (current != worklist) 778 { 779 bb = *--current; 780 remove_forwarder_block_with_phi (bb); 781 } 782 783 free (worklist); 784} 785 786static bool 787gate_merge_phi (void) 788{ 789 return 1; 790} 791 792struct tree_opt_pass pass_merge_phi = { 793 "mergephi", /* name */ 794 gate_merge_phi, /* gate */ 795 merge_phi_nodes, /* execute */ 796 NULL, /* sub */ 797 NULL, /* next */ 798 0, /* static_pass_number */ 799 TV_TREE_MERGE_PHI, /* tv_id */ 800 PROP_cfg | PROP_ssa, /* properties_required */ 801 0, /* properties_provided */ 802 0, /* properties_destroyed */ 803 0, /* todo_flags_start */ 804 TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */ 805 | TODO_verify_ssa, 806 0 /* letter */ 807}; 808