/* Detect paths through the CFG which can never be executed in a conforming
   program and isolate them.

   Copyright (C) 2013-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "flags.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "gimple-ssa.h"
#include "tree-ssa-operands.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "tree-cfg.h"
#include "diagnostic-core.h"
#include "intl.h"


static bool cfg_altered;

/* Callback for walk_stmt_load_store_ops.

   Return TRUE if OP will dereference the tree stored in DATA, FALSE
   otherwise.

   This routine only makes a superficial check for a dereference.  Thus,
   it must only be used if it is safe to return a false negative.  */
static bool
check_loadstore (gimple stmt, tree op, tree, void *data)
{
  if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
      && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
    {
      TREE_THIS_VOLATILE (op) = 1;
      TREE_SIDE_EFFECTS (op) = 1;
      update_stmt (stmt);
      return true;
    }
  return false;
}

/* Insert a trap after SI and remove SI and all statements after the trap.  */

static void
insert_trap_and_remove_trailing_statements (gimple_stmt_iterator *si_p, tree op)
{
  /* We want the NULL pointer dereference to actually occur so that
     code that wishes to catch the signal can do so.

     If the dereference is a load, then there's nothing to do as the
     LHS will be a throw-away SSA_NAME and the RHS is the NULL dereference.

     If the dereference is a store and we can easily transform the RHS,
     then simplify the RHS to enable more DCE.  Note that we require the
     statement to be a GIMPLE_ASSIGN which filters out calls on the RHS.  */
  gimple stmt = gsi_stmt (*si_p);
  if (walk_stmt_load_store_ops (stmt, (void *)op, NULL, check_loadstore)
      && is_gimple_assign (stmt)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
    {
      /* We just need to turn the RHS into zero converted to the proper
         type.  */
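      /* For illustration (hypothetical GIMPLE): a store "*p_2 = x_1;"
         through the NULL pointer p_2 becomes "*p_2 = 0;", so the
         computation feeding x_1 can be cleaned up by later DCE.  */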
      tree type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_code (stmt, INTEGER_CST);
      gimple_assign_set_rhs1 (stmt, fold_convert (type, integer_zero_node));
      update_stmt (stmt);
    }

  gcall *new_stmt
    = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, new_stmt);

  /* If we had a NULL pointer dereference, then we want to insert the
     __builtin_trap after the statement; for the other cases we want
     to insert it before the statement.  */
  if (walk_stmt_load_store_ops (stmt, (void *)op,
                                check_loadstore,
                                check_loadstore))
    {
      gsi_insert_after (si_p, seq, GSI_NEW_STMT);
      if (stmt_ends_bb_p (stmt))
        {
          split_block (gimple_bb (stmt), stmt);
          return;
        }
    }
  else
    gsi_insert_before (si_p, seq, GSI_NEW_STMT);

  /* We must remove statements from the end of the block so that we
     never reference a released SSA_NAME.  */
  basic_block bb = gimple_bb (gsi_stmt (*si_p));
  for (gimple_stmt_iterator si = gsi_last_bb (bb);
       gsi_stmt (si) != gsi_stmt (*si_p);
       si = gsi_last_bb (bb))
    {
      stmt = gsi_stmt (si);
      unlink_stmt_vdef (stmt);
      gsi_remove (&si, true);
      release_defs (stmt);
    }
}

/* BB when reached via incoming edge E will exhibit undefined behaviour
   at STMT.  Isolate and optimize the path which exhibits undefined
   behaviour.

   Isolation is simple.  Duplicate BB and redirect E to BB'.

   Optimization is simple as well.  Replace STMT in BB' with an
   unconditional trap and remove all outgoing edges from BB'.

   If RET_ZERO, do not insert a trap; instead make the duplicated
   return statement return NULL.

   DUPLICATE is a pre-existing duplicate, use it as BB' if it exists.

   Return BB'.  */

basic_block
isolate_path (basic_block bb, basic_block duplicate,
              edge e, gimple stmt, tree op, bool ret_zero)
{
  gimple_stmt_iterator si, si2;
  edge_iterator ei;
  edge e2;

  /* First duplicate BB if we have not done so already and remove all
     the duplicate's outgoing edges as the duplicate is going to
     unconditionally trap.  Removing the outgoing edges is both an
     optimization and ensures we don't need to do any PHI node updates.  */
  if (!duplicate)
    {
      duplicate = duplicate_block (bb, NULL, NULL);
      if (!ret_zero)
        for (ei = ei_start (duplicate->succs); (e2 = ei_safe_edge (ei)); )
          remove_edge (e2);
    }

  /* Complete the isolation step by redirecting E to reach DUPLICATE.  */
  e2 = redirect_edge_and_branch (e, duplicate);
  if (e2)
    flush_pending_stmts (e2);


  /* There may be more than one statement in DUPLICATE which exhibits
     undefined behaviour.  Ultimately we want the first such statement in
     DUPLICATE so that we're able to delete as much code as possible.

     So each time we discover undefined behaviour in DUPLICATE, search for
     the statement which triggers undefined behaviour.  If found, then
     transform the statement into a trap and delete everything after the
     statement.  If not found, then this particular instance was subsumed by
     an earlier instance of undefined behaviour and there's nothing to do.

     This is made more complicated by the fact that we have STMT, which is in
     BB rather than in DUPLICATE.  So we set up two iterators, one for each
     block and walk forward looking for STMT in BB, advancing each iterator at
     each step.

     When we find STMT the second iterator should point to STMT's equivalent in
     duplicate.  If DUPLICATE ends before STMT is found in BB, then there's
     nothing to do.

     Ignore labels and debug statements.  */
  si = gsi_start_nondebug_after_labels_bb (bb);
  si2 = gsi_start_nondebug_after_labels_bb (duplicate);
  while (!gsi_end_p (si) && !gsi_end_p (si2) && gsi_stmt (si) != stmt)
    {
      gsi_next_nondebug (&si);
      gsi_next_nondebug (&si2);
    }

  /* This would be an indicator that we never found STMT in BB, which should
     never happen.  */
  gcc_assert (!gsi_end_p (si));

  /* If we did not run to the end of DUPLICATE, then SI points to STMT and
     SI2 points to the duplicate of STMT in DUPLICATE.  Insert a trap
     before SI2 and remove SI2 and all trailing statements.  */
  if (!gsi_end_p (si2))
    {
      if (ret_zero)
        {
          greturn *ret = as_a <greturn *> (gsi_stmt (si2));
          tree zero = build_zero_cst (TREE_TYPE (gimple_return_retval (ret)));
          gimple_return_set_retval (ret, zero);
          update_stmt (ret);
        }
      else
        insert_trap_and_remove_trailing_statements (&si2, op);
    }

  return duplicate;
}

/* Look for PHI nodes which feed statements in the same block where
   the value of the PHI node implies the statement is erroneous.

   For example, a NULL PHI arg value which then feeds a pointer
   dereference.

   When found, isolate and optimize the path associated with the PHI
   argument feeding the erroneous statement.  */
static void
find_implicit_erroneous_behaviour (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator si;

      /* Out of an abundance of caution, do not isolate paths to a
         block where the block has any abnormal outgoing edges.

         We might be able to relax this in the future.  We have to detect
         when we have to split the block with the NULL dereference and
         the trap we insert.  We have to preserve abnormal edges out
         of the isolated block which in turn means updating PHIs at
         the targets of those abnormal outgoing edges.  */
      if (has_abnormal_or_eh_outgoing_edge_p (bb))
        continue;

      /* First look for a PHI which sets a pointer to NULL and which
         is then dereferenced within BB.  This is somewhat overly
         conservative, but probably catches most of the interesting
         cases.  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gphi *phi = si.phi ();
          tree lhs = gimple_phi_result (phi);

          /* If the result is not a pointer, then there is no need to
             examine the arguments.  */
          if (!POINTER_TYPE_P (TREE_TYPE (lhs)))
            continue;

          /* PHI produces a pointer result.  See if any of the PHI's
             arguments are NULL.

             When we remove an edge, we want to reprocess the current
             index, hence the ugly way we update I for each iteration.  */
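          /* A hypothetical GIMPLE sketch of the pattern we are after:

               # p_1 = PHI <0B(3), q_4(D)(5)>
               ...
               tmp_6 = *p_1;

             Reaching BB via the edge from block 3 guarantees that p_1
             is NULL, so that incoming path can be isolated.  */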
          basic_block duplicate = NULL;
          for (unsigned i = 0, next_i = 0;
               i < gimple_phi_num_args (phi);
               i = next_i)
            {
              tree op = gimple_phi_arg_def (phi, i);
              edge e = gimple_phi_arg_edge (phi, i);
              imm_use_iterator iter;
              gimple use_stmt;

              next_i = i + 1;

              if (TREE_CODE (op) == ADDR_EXPR)
                {
                  tree valbase = get_base_address (TREE_OPERAND (op, 0));
                  if ((TREE_CODE (valbase) == VAR_DECL
                       && !is_global_var (valbase))
                      || TREE_CODE (valbase) == PARM_DECL)
                    {
                      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
                        {
                          greturn *return_stmt
                            = dyn_cast <greturn *> (use_stmt);
                          if (!return_stmt)
                            continue;

                          if (gimple_return_retval (return_stmt) != lhs)
                            continue;

                          if (warning_at (gimple_location (use_stmt),
                                          OPT_Wreturn_local_addr,
                                          "function may return address "
                                          "of local variable"))
                            inform (DECL_SOURCE_LOCATION (valbase),
                                    "declared here");

                          if (gimple_bb (use_stmt) == bb)
                            {
                              duplicate = isolate_path (bb, duplicate, e,
                                                        use_stmt, lhs, true);

                              /* When we remove an incoming edge, we need to
                                 reprocess the Ith element.  */
                              next_i = i;
                              cfg_altered = true;
                            }
                        }
                    }
                }

              if (!integer_zerop (op))
                continue;

              /* We've got a NULL PHI argument.  Now see if the
                 PHI's result is dereferenced within BB.  */
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
                {
                  /* We only care about uses in BB.  Catching cases in
                     other blocks would require more complex path
                     isolation code.  */
                  if (gimple_bb (use_stmt) != bb)
                    continue;

                  if (infer_nonnull_range (use_stmt, lhs,
                                           flag_isolate_erroneous_paths_dereference,
                                           flag_isolate_erroneous_paths_attribute))
                    {
                      duplicate = isolate_path (bb, duplicate, e,
                                                use_stmt, lhs, false);

                      /* When we remove an incoming edge, we need to
                         reprocess the Ith element.  */
                      next_i = i;
                      cfg_altered = true;
                    }
                }
            }
        }
    }
}

/* Look for statements which exhibit erroneous behaviour.  For example
   a NULL pointer dereference.

   When found, optimize the block containing the erroneous behaviour.  */
static void
find_explicit_erroneous_behaviour (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;

      /* Out of an abundance of caution, do not isolate paths to a
         block where the block has any abnormal outgoing edges.

         We might be able to relax this in the future.  We have to detect
         when we have to split the block with the NULL dereference and
         the trap we insert.  We have to preserve abnormal edges out
         of the isolated block which in turn means updating PHIs at
         the targets of those abnormal outgoing edges.  */
      if (has_abnormal_or_eh_outgoing_edge_p (bb))
        continue;

      /* Now look at the statements in the block and see if any of
         them explicitly dereference a NULL pointer.  This can happen
         as a result of jump threading and constant propagation.  */
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple stmt = gsi_stmt (si);

          /* By passing null_pointer_node, we can use infer_nonnull_range
             to detect explicit NULL pointer dereferences and other uses
             where a non-NULL value is required.  */
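          /* For instance (hypothetical GIMPLE after jump threading or
             constant propagation):

               MEM[(int *)0B] = 42;

             infer_nonnull_range flags the statement, a __builtin_trap is
             inserted next to it, and the statements after the trap as
             well as the block's outgoing edges are removed.  */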
          if (infer_nonnull_range (stmt, null_pointer_node,
                                   flag_isolate_erroneous_paths_dereference,
                                   flag_isolate_erroneous_paths_attribute))
            {
              insert_trap_and_remove_trailing_statements (&si,
                                                          null_pointer_node);

              /* And finally, remove all outgoing edges from BB.  */
              edge e;
              for (edge_iterator ei = ei_start (bb->succs);
                   (e = ei_safe_edge (ei)); )
                remove_edge (e);

              /* There is nothing more to do in this block once its
                 outgoing edges are gone, so stop the statement
                 iterator here.  */
              cfg_altered = true;
              break;
            }

          /* Detect returning the address of a local variable.  This only
             becomes undefined behavior if the result is used, so we do not
             insert a trap; we just return NULL instead.  */
          if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree val = gimple_return_retval (return_stmt);
              if (val && TREE_CODE (val) == ADDR_EXPR)
                {
                  tree valbase = get_base_address (TREE_OPERAND (val, 0));
                  if ((TREE_CODE (valbase) == VAR_DECL
                       && !is_global_var (valbase))
                      || TREE_CODE (valbase) == PARM_DECL)
                    {
                      /* Post-dominator information is only needed for
                         this particular case.  */
                      calculate_dominance_info (CDI_POST_DOMINATORS);
                      const char *msg;
                      bool always_executed = dominated_by_p
                        (CDI_POST_DOMINATORS,
                         single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)), bb);
                      if (always_executed)
                        msg = N_("function returns address of local variable");
                      else
                        msg = N_("function may return address of "
                                 "local variable");

                      if (warning_at (gimple_location (stmt),
                                      OPT_Wreturn_local_addr, msg))
                        inform (DECL_SOURCE_LOCATION (valbase),
                                "declared here");
                      tree zero = build_zero_cst (TREE_TYPE (val));
                      gimple_return_set_retval (return_stmt, zero);
                      update_stmt (stmt);
                    }
                }
            }
        }
    }
}

/* Search the function for statements which, if executed, would cause
   the program to fault, such as a dereference of a NULL pointer.

   According to the ISO standards, no conforming program may execute
   such a statement.

   We detect explicit NULL pointer dereferences as well as those implied
   by a PHI argument having a NULL value which unconditionally flows into
   a dereference in the same block as the PHI.

   In the former case we replace the offending statement with an
   unconditional trap and eliminate the outgoing edges from the statement's
   basic block.  This may expose secondary optimization opportunities.

   In the latter case, we isolate the path(s) with the NULL PHI
   feeding the dereference.  We can then replace the offending statement
   and eliminate the outgoing edges in the duplicate.  Again, this may
   expose secondary optimization opportunities.

   A warning for both cases may be advisable as well.

   Other statically detectable violations of the ISO standard could be
   handled in a similar way, such as out-of-bounds array indexing.  */

static unsigned int
gimple_ssa_isolate_erroneous_paths (void)
{
  initialize_original_copy_tables ();

  /* Search all the blocks for edges which, if traversed, will
     result in undefined behaviour.  */
  cfg_altered = false;

  /* First handle cases where traversal of a particular edge
     triggers undefined behaviour.  These cases require creating
     duplicate blocks and thus new SSA_NAMEs.

     We want that process complete prior to the phase where we start
     removing edges from the CFG.
     Edge removal may ultimately result in removal of PHI nodes and
     thus releasing SSA_NAMEs back to the name manager.

     If the two processes run in parallel we could release an SSA_NAME
     back to the manager but we could still have dangling references
     to the released SSA_NAME in unreachable blocks.  Running the
     duplication phase to completion first ensures that any released
     names do not have dangling references in the IL.  */
  find_implicit_erroneous_behaviour ();
  find_explicit_erroneous_behaviour ();

  free_original_copy_tables ();

  /* We scramble the CFG and loop structures a bit, so clean up
     appropriately.  We really should incrementally update the
     loop structures; in theory it shouldn't be that hard.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  if (cfg_altered)
    {
      free_dominance_info (CDI_DOMINATORS);
      loops_state_set (LOOPS_NEED_FIXUP);
      return TODO_cleanup_cfg | TODO_update_ssa;
    }
  return 0;
}

namespace {
const pass_data pass_data_isolate_erroneous_paths =
{
  GIMPLE_PASS, /* type */
  "isolate-paths", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ISOLATE_ERRONEOUS_PATHS, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_isolate_erroneous_paths : public gimple_opt_pass
{
public:
  pass_isolate_erroneous_paths (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_isolate_erroneous_paths, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_isolate_erroneous_paths (m_ctxt); }
  virtual bool gate (function *)
  {
    /* Run the pass only if isolating paths for undefined dereferences
       or for nonnull attribute violations has been requested.  */
    return (flag_isolate_erroneous_paths_dereference != 0
            || flag_isolate_erroneous_paths_attribute != 0);
  }

  virtual unsigned int execute (function *)
  {
    return gimple_ssa_isolate_erroneous_paths ();
  }

}; // class pass_isolate_erroneous_paths
}

gimple_opt_pass *
make_pass_isolate_erroneous_paths (gcc::context *ctxt)
{
  return new pass_isolate_erroneous_paths (ctxt);
}