1/* Control flow functions for trees.
2   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3   Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to
19the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20Boston, MA 02110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
45#include "cfglayout.h"
46#include "hashtab.h"
47#include "tree-ssa-propagate.h"
48
49/* This file contains functions for building the Control Flow Graph (CFG)
50   for a function tree.  */
51
52/* Local declarations.  */
53
54/* Initial capacity for the basic block array.  */
55static const int initial_cfg_capacity = 20;
56
57/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
59   via their TREE_CHAIN field, which we clear after we're done with the
60   hash table to prevent problems with duplication of SWITCH_EXPRs.
61
62   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63   update the case vector in response to edge redirections.
64
65   Right now this table is set up and torn down at key points in the
66   compilation process.  It would be nice if we could make the table
67   more persistent.  The key is getting notification of changes to
68   the CFG (particularly edge removal, creation and redirection).  */
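
/* For example, if two cases of a switch transfer control to the same
   label, say

       switch (x) { case 0: case 1: goto L1; default: goto L2; }

   then the single edge from the switch's block to the block holding L1
   is associated with both CASE_LABEL_EXPRs (the ones for 0 and 1),
   linked through their TREE_CHAIN fields.  */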
69
70struct edge_to_cases_elt
71{
72  /* The edge itself.  Necessary for hashing and equality tests.  */
73  edge e;
74
75  /* The case labels associated with this edge.  We link these up via
76     their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77     when we destroy the hash table.  This prevents problems when copying
78     SWITCH_EXPRs.  */
79  tree case_labels;
80};
81
82static htab_t edge_to_cases;
83
84/* CFG statistics.  */
85struct cfg_stats_d
86{
87  long num_merged_labels;
88};
89
90static struct cfg_stats_d cfg_stats;
91
92/* Nonzero if we found a computed goto while building basic blocks.  */
93static bool found_computed_goto;
94
95/* Basic blocks and flowgraphs.  */
96static basic_block create_bb (void *, void *, basic_block);
97static void make_blocks (tree);
98static void factor_computed_gotos (void);
99
100/* Edges.  */
101static void make_edges (void);
102static void make_ctrl_stmt_edges (basic_block);
103static void make_exit_edges (basic_block);
104static void make_cond_expr_edges (basic_block);
105static void make_switch_expr_edges (basic_block);
106static void make_goto_expr_edges (basic_block);
107static edge tree_redirect_edge_and_branch (edge, basic_block);
108static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109static void split_critical_edges (void);
110
111/* Various helpers.  */
112static inline bool stmt_starts_bb_p (tree, tree);
113static int tree_verify_flow_info (void);
114static void tree_make_forwarder_block (edge);
115static void tree_cfg2vcg (FILE *);
116
117/* Flowgraph optimization and cleanup.  */
118static void tree_merge_blocks (basic_block, basic_block);
119static bool tree_can_merge_blocks_p (basic_block, basic_block);
120static void remove_bb (basic_block);
121static edge find_taken_edge_computed_goto (basic_block, tree);
122static edge find_taken_edge_cond_expr (basic_block, tree);
123static edge find_taken_edge_switch_expr (basic_block, tree);
124static tree find_case_label_for_value (tree, tree);
125
126void
127init_empty_tree_cfg (void)
128{
129  /* Initialize the basic block array.  */
130  init_flow ();
131  profile_status = PROFILE_ABSENT;
132  n_basic_blocks = 0;
133  last_basic_block = 0;
134  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
135
136  /* Build a mapping of labels to their associated blocks.  */
137  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
138		  "label to block map");
139
140  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
141  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
142}
143
144/*---------------------------------------------------------------------------
145			      Create basic blocks
146---------------------------------------------------------------------------*/
147
148/* Entry point to the CFG builder for trees.  TP points to the list of
149   statements to be added to the flowgraph.  */
150
151static void
152build_tree_cfg (tree *tp)
153{
154  /* Register specific tree functions.  */
155  tree_register_cfg_hooks ();
156
157  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
158
159  init_empty_tree_cfg ();
160
161  found_computed_goto = 0;
162  make_blocks (*tp);
163
164  /* Computed gotos are hell to deal with, especially if there are
165     lots of them with a large number of destinations.  So we factor
166     them to a common computed goto location before we build the
167     edge list.  After we convert back to normal form, we will un-factor
168     the computed gotos since factoring introduces an unwanted jump.  */
169  if (found_computed_goto)
170    factor_computed_gotos ();
171
172  /* Make sure there is always at least one block, even if it's empty.  */
173  if (n_basic_blocks == 0)
174    create_empty_bb (ENTRY_BLOCK_PTR);
175
176  /* Adjust the size of the array.  */
177  VARRAY_GROW (basic_block_info, n_basic_blocks);
178
179  /* To speed up statement iterator walks, we first purge dead labels.  */
180  cleanup_dead_labels ();
181
182  /* Group case nodes to reduce the number of edges.
183     We do this after cleaning up dead labels because otherwise we miss
184     a lot of obvious case merging opportunities.  */
185  group_case_labels ();
186
187  /* Create the edges of the flowgraph.  */
188  make_edges ();
189
190  /* Debugging dumps.  */
191
192  /* Write the flowgraph to a VCG file.  */
193  {
194    int local_dump_flags;
195    FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
196    if (dump_file)
197      {
198	tree_cfg2vcg (dump_file);
199	dump_end (TDI_vcg, dump_file);
200      }
201  }
202
203#ifdef ENABLE_CHECKING
204  verify_stmts ();
205#endif
206
207  /* Dump a textual representation of the flowgraph.  */
208  if (dump_file)
209    dump_tree_cfg (dump_file, dump_flags);
210}
211
212static void
213execute_build_cfg (void)
214{
215  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
216}
217
218struct tree_opt_pass pass_build_cfg =
219{
220  "cfg",				/* name */
221  NULL,					/* gate */
222  execute_build_cfg,			/* execute */
223  NULL,					/* sub */
224  NULL,					/* next */
225  0,					/* static_pass_number */
226  TV_TREE_CFG,				/* tv_id */
227  PROP_gimple_leh,			/* properties_required */
228  PROP_cfg,				/* properties_provided */
229  0,					/* properties_destroyed */
230  0,					/* todo_flags_start */
231  TODO_verify_stmts,			/* todo_flags_finish */
232  0					/* letter */
233};
234
235/* Search the CFG for any computed gotos.  If found, factor them to a
236   common computed goto site.  Also record the location of that site so
237   that we can un-factor the gotos after we have converted back to
238   normal form.  */
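
/* Roughly, the transformation rewrites every

       goto *DEST;

   in the function as

       gotovar = DEST;
       goto <factored label>;

   and adds one new block

       <factored label>:
       goto *gotovar;

   where "gotovar" and the factored label are the artificial temporary
   and label created below.  */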
239
240static void
241factor_computed_gotos (void)
242{
243  basic_block bb;
244  tree factored_label_decl = NULL;
245  tree var = NULL;
246  tree factored_computed_goto_label = NULL;
247  tree factored_computed_goto = NULL;
248
249  /* We know there are one or more computed gotos in this function.
250     Examine the last statement in each basic block to see if the block
251     ends with a computed goto.  */
252
253  FOR_EACH_BB (bb)
254    {
255      block_stmt_iterator bsi = bsi_last (bb);
256      tree last;
257
258      if (bsi_end_p (bsi))
259	continue;
260      last = bsi_stmt (bsi);
261
262      /* Ignore the computed goto we create when we factor the original
263	 computed gotos.  */
264      if (last == factored_computed_goto)
265	continue;
266
267      /* If the last statement is a computed goto, factor it.  */
268      if (computed_goto_p (last))
269	{
270	  tree assignment;
271
272	  /* The first time we find a computed goto we need to create
273	     the factored goto block and the variable each original
274	     computed goto will use for their goto destination.  */
275	  if (! factored_computed_goto)
276	    {
277	      basic_block new_bb = create_empty_bb (bb);
278	      block_stmt_iterator new_bsi = bsi_start (new_bb);
279
280	      /* Create the destination of the factored goto.  Each original
281		 computed goto will put its desired destination into this
282		 variable and jump to the label we create immediately
283		 below.  */
284	      var = create_tmp_var (ptr_type_node, "gotovar");
285
286	      /* Build a label for the new block which will contain the
287		 factored computed goto.  */
288	      factored_label_decl = create_artificial_label ();
289	      factored_computed_goto_label
290		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
291	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
292				BSI_NEW_STMT);
293
294	      /* Build our new computed goto.  */
295	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
296	      bsi_insert_after (&new_bsi, factored_computed_goto,
297				BSI_NEW_STMT);
298	    }
299
300	  /* Copy the original computed goto's destination into VAR.  */
301	  assignment = build (MODIFY_EXPR, ptr_type_node,
302			      var, GOTO_DESTINATION (last));
303	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
304
305	  /* And re-vector the computed goto to the new destination.  */
306	  GOTO_DESTINATION (last) = factored_label_decl;
307	}
308    }
309}
310
311
312/* Build a flowgraph for the statement_list STMT_LIST.  */
313
314static void
315make_blocks (tree stmt_list)
316{
317  tree_stmt_iterator i = tsi_start (stmt_list);
318  tree stmt = NULL;
319  bool start_new_block = true;
320  bool first_stmt_of_list = true;
321  basic_block bb = ENTRY_BLOCK_PTR;
322
323  while (!tsi_end_p (i))
324    {
325      tree prev_stmt;
326
327      prev_stmt = stmt;
328      stmt = tsi_stmt (i);
329
330      /* If the statement starts a new basic block or if we have determined
331	 in a previous pass that we need to create a new block for STMT, do
332	 so now.  */
333      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
334	{
335	  if (!first_stmt_of_list)
336	    stmt_list = tsi_split_statement_list_before (&i);
337	  bb = create_basic_block (stmt_list, NULL, bb);
338	  start_new_block = false;
339	}
340
341      /* Now add STMT to BB and create the subgraphs for special statement
342	 codes.  */
343      set_bb_for_stmt (stmt, bb);
344
345      if (computed_goto_p (stmt))
346	found_computed_goto = true;
347
348      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
349	 next iteration.  */
350      if (stmt_ends_bb_p (stmt))
351	start_new_block = true;
352
353      tsi_next (&i);
354      first_stmt_of_list = false;
355    }
356}
357
358
359/* Create and return a new empty basic block after bb AFTER.  */
360
361static basic_block
362create_bb (void *h, void *e, basic_block after)
363{
364  basic_block bb;
365
366  gcc_assert (!e);
367
368  /* Create and initialize a new basic block.  Since alloc_block uses
369     ggc_alloc_cleared to allocate a basic block, we do not have to
370     clear the newly allocated basic block here.  */
371  bb = alloc_block ();
372
373  bb->index = last_basic_block;
374  bb->flags = BB_NEW;
375  bb->stmt_list = h ? h : alloc_stmt_list ();
376
377  /* Add the new block to the linked list of blocks.  */
378  link_block (bb, after);
379
380  /* Grow the basic block array if needed.  */
381  if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
382    {
383      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
384      VARRAY_GROW (basic_block_info, new_size);
385    }
386
387  /* Add the newly created block to the array.  */
388  BASIC_BLOCK (last_basic_block) = bb;
389
390  n_basic_blocks++;
391  last_basic_block++;
392
393  return bb;
394}
395
396
397/*---------------------------------------------------------------------------
398				 Edge creation
399---------------------------------------------------------------------------*/
400
401/* Fold COND_EXPR_COND of each COND_EXPR.  */
402
403void
404fold_cond_expr_cond (void)
405{
406  basic_block bb;
407
408  FOR_EACH_BB (bb)
409    {
410      tree stmt = last_stmt (bb);
411
412      if (stmt
413	  && TREE_CODE (stmt) == COND_EXPR)
414	{
415	  tree cond = fold (COND_EXPR_COND (stmt));
416	  if (integer_zerop (cond))
417	    COND_EXPR_COND (stmt) = boolean_false_node;
418	  else if (integer_onep (cond))
419	    COND_EXPR_COND (stmt) = boolean_true_node;
420	}
421    }
422}
423
424/* Join all the blocks in the flowgraph.  */
425
426static void
427make_edges (void)
428{
429  basic_block bb;
430
431  /* Create an edge from entry to the first block with executable
432     statements in it.  */
433  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
434
435  /* Traverse the basic block array placing edges.  */
436  FOR_EACH_BB (bb)
437    {
438      tree first = first_stmt (bb);
439      tree last = last_stmt (bb);
440
441      if (first)
442	{
443	  /* Edges for statements that always alter flow control.  */
444	  if (is_ctrl_stmt (last))
445	    make_ctrl_stmt_edges (bb);
446
447	  /* Edges for statements that sometimes alter flow control.  */
448	  if (is_ctrl_altering_stmt (last))
449	    make_exit_edges (bb);
450	}
451
452      /* Finally, if no edges were created above, this is a regular
453	 basic block that only needs a fallthru edge.  */
454      if (EDGE_COUNT (bb->succs) == 0)
455	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
456    }
457
458  /* We do not care about fake edges, so remove any that the CFG
459     builder inserted for completeness.  */
460  remove_fake_exit_edges ();
461
462  /* Fold COND_EXPR_COND of each COND_EXPR.  */
463  fold_cond_expr_cond ();
464
465  /* Clean up the graph and warn for unreachable code.  */
466  cleanup_tree_cfg ();
467}
468
469
470/* Create edges for control statement at basic block BB.  */
471
472static void
473make_ctrl_stmt_edges (basic_block bb)
474{
475  tree last = last_stmt (bb);
476
477  gcc_assert (last);
478  switch (TREE_CODE (last))
479    {
480    case GOTO_EXPR:
481      make_goto_expr_edges (bb);
482      break;
483
484    case RETURN_EXPR:
485      make_edge (bb, EXIT_BLOCK_PTR, 0);
486      break;
487
488    case COND_EXPR:
489      make_cond_expr_edges (bb);
490      break;
491
492    case SWITCH_EXPR:
493      make_switch_expr_edges (bb);
494      break;
495
496    case RESX_EXPR:
497      make_eh_edges (last);
498      /* Yet another NORETURN hack.  */
499      if (EDGE_COUNT (bb->succs) == 0)
500	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
501      break;
502
503    default:
504      gcc_unreachable ();
505    }
506}
507
508
509/* Create exit edges for statements in block BB that alter the flow of
510   control.  Statements that alter the control flow are 'goto', 'return'
511   and calls to non-returning functions.  */
512
513static void
514make_exit_edges (basic_block bb)
515{
516  tree last = last_stmt (bb), op;
517
518  gcc_assert (last);
519  switch (TREE_CODE (last))
520    {
521    case RESX_EXPR:
522      break;
523    case CALL_EXPR:
524      /* If this function receives a nonlocal goto, then we need to
525	 make edges from this call site to all the nonlocal goto
526	 handlers.  */
527      if (TREE_SIDE_EFFECTS (last)
528	  && current_function_has_nonlocal_label)
529	make_goto_expr_edges (bb);
530
531      /* If this statement has reachable exception handlers, then
532	 create abnormal edges to them.  */
533      make_eh_edges (last);
534
535      /* Some calls are known not to return.  For such calls we create
536	 a fake edge.
537
538	 We really need to revamp how we build edges so that it's not
539	 such a bloody pain to avoid creating edges for this case since
540	 all we do is remove these edges when we're done building the
541	 CFG.  */
542      if (call_expr_flags (last) & ECF_NORETURN)
543	{
544	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
545	  return;
546	}
547
548      /* Don't forget the fall-thru edge.  */
549      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
550      break;
551
552    case MODIFY_EXPR:
553      /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
554	 may have an abnormal edge.  Search the RHS for this case and
555	 create any required edges.  */
556      op = get_call_expr_in (last);
557      if (op && TREE_SIDE_EFFECTS (op)
558	  && current_function_has_nonlocal_label)
559	make_goto_expr_edges (bb);
560
561      make_eh_edges (last);
562      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563      break;
564
565    default:
566      gcc_unreachable ();
567    }
568}
569
570
571/* Create the edges for a COND_EXPR starting at block BB.
572   At this point, both clauses must contain only simple gotos.  */
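
/* For example, a lowered conditional of the form

       if (cond) goto THEN_LABEL; else goto ELSE_LABEL;

   gets an EDGE_TRUE_VALUE edge to the block holding THEN_LABEL and an
   EDGE_FALSE_VALUE edge to the block holding ELSE_LABEL.  */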
573
574static void
575make_cond_expr_edges (basic_block bb)
576{
577  tree entry = last_stmt (bb);
578  basic_block then_bb, else_bb;
579  tree then_label, else_label;
580  edge e;
581
582  gcc_assert (entry);
583  gcc_assert (TREE_CODE (entry) == COND_EXPR);
584
585  /* Entry basic blocks for each component.  */
586  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
587  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
588  then_bb = label_to_block (then_label);
589  else_bb = label_to_block (else_label);
590
591  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
592#ifdef USE_MAPPED_LOCATION
593  e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
594#else
595  e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
596#endif
597  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
598  if (e)
599    {
600#ifdef USE_MAPPED_LOCATION
601      e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
602#else
603      e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
604#endif
605    }
606}
607
608/* Hashing routine for EDGE_TO_CASES.  */
609
610static hashval_t
611edge_to_cases_hash (const void *p)
612{
613  edge e = ((struct edge_to_cases_elt *)p)->e;
614
615  /* Hash on the edge itself (which is a pointer).  */
616  return htab_hash_pointer (e);
617}
618
/* Equality routine for EDGE_TO_CASES.  Edges are unique, so testing
   for equality is just a pointer comparison.  */
621
622static int
623edge_to_cases_eq (const void *p1, const void *p2)
624{
625  edge e1 = ((struct edge_to_cases_elt *)p1)->e;
626  edge e2 = ((struct edge_to_cases_elt *)p2)->e;
627
628  return e1 == e2;
629}
630
631/* Called for each element in the hash table (P) as we delete the
632   edge to cases hash table.
633
634   Clear all the TREE_CHAINs to prevent problems with copying of
635   SWITCH_EXPRs and structure sharing rules, then free the hash table
636   element.  */
637
638static void
639edge_to_cases_cleanup (void *p)
640{
641  struct edge_to_cases_elt *elt = p;
642  tree t, next;
643
644  for (t = elt->case_labels; t; t = next)
645    {
646      next = TREE_CHAIN (t);
647      TREE_CHAIN (t) = NULL;
648    }
649  free (p);
650}
651
652/* Start recording information mapping edges to case labels.  */
653
654void
655start_recording_case_labels (void)
656{
657  gcc_assert (edge_to_cases == NULL);
658
659  edge_to_cases = htab_create (37,
660			       edge_to_cases_hash,
661			       edge_to_cases_eq,
662			       edge_to_cases_cleanup);
663}
664
665/* Return nonzero if we are recording information for case labels.  */
666
667static bool
668recording_case_labels_p (void)
669{
670  return (edge_to_cases != NULL);
671}
672
673/* Stop recording information mapping edges to case labels and
674   remove any information we have recorded.  */
675void
676end_recording_case_labels (void)
677{
678  htab_delete (edge_to_cases);
679  edge_to_cases = NULL;
680}
681
682/* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E.  */
683
684static void
685record_switch_edge (edge e, tree case_label)
686{
687  struct edge_to_cases_elt *elt;
688  void **slot;
689
690  /* Build a hash table element so we can see if E is already
691     in the table.  */
692  elt = xmalloc (sizeof (struct edge_to_cases_elt));
693  elt->e = e;
694  elt->case_labels = case_label;
695
696  slot = htab_find_slot (edge_to_cases, elt, INSERT);
697
698  if (*slot == NULL)
699    {
700      /* E was not in the hash table.  Install E into the hash table.  */
701      *slot = (void *)elt;
702    }
703  else
704    {
705      /* E was already in the hash table.  Free ELT as we do not need it
706	 anymore.  */
707      free (elt);
708
709      /* Get the entry stored in the hash table.  */
710      elt = (struct edge_to_cases_elt *) *slot;
711
712      /* Add it to the chain of CASE_LABEL_EXPRs referencing E.  */
713      TREE_CHAIN (case_label) = elt->case_labels;
714      elt->case_labels = case_label;
715    }
716}
717
718/* If we are inside a {start,end}_recording_cases block, then return
719   a chain of CASE_LABEL_EXPRs from T which reference E.
720
721   Otherwise return NULL.  */
722
723static tree
724get_cases_for_edge (edge e, tree t)
725{
726  struct edge_to_cases_elt elt, *elt_p;
727  void **slot;
728  size_t i, n;
729  tree vec;
730
731  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
732     chains available.  Return NULL so the caller can detect this case.  */
733  if (!recording_case_labels_p ())
734    return NULL;
735
736restart:
737  elt.e = e;
738  elt.case_labels = NULL;
739  slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
740
741  if (slot)
742    {
743      elt_p = (struct edge_to_cases_elt *)*slot;
744      return elt_p->case_labels;
745    }
746
747  /* If we did not find E in the hash table, then this must be the first
748     time we have been queried for information about E & T.  Add all the
749     elements from T to the hash table then perform the query again.  */
750
751  vec = SWITCH_LABELS (t);
752  n = TREE_VEC_LENGTH (vec);
753  for (i = 0; i < n; i++)
754    {
755      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
756      basic_block label_bb = label_to_block (lab);
757      record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
758    }
759  goto restart;
760}
761
762/* Create the edges for a SWITCH_EXPR starting at block BB.
763   At this point, the switch body has been lowered and the
764   SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */
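
/* For example, a switch whose SWITCH_LABELS vector names the labels
   L1 (case 1), L2 (case 2) and L3 (default) gets three outgoing edges,
   one to the block holding each of those labels.  */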
765
766static void
767make_switch_expr_edges (basic_block bb)
768{
769  tree entry = last_stmt (bb);
770  size_t i, n;
771  tree vec;
772
773  vec = SWITCH_LABELS (entry);
774  n = TREE_VEC_LENGTH (vec);
775
776  for (i = 0; i < n; ++i)
777    {
778      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
779      basic_block label_bb = label_to_block (lab);
780      make_edge (bb, label_bb, 0);
781    }
782}
783
784
785/* Return the basic block holding label DEST.  */
786
787basic_block
788label_to_block_fn (struct function *ifun, tree dest)
789{
790  int uid = LABEL_DECL_UID (dest);
791
  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
795  if ((errorcount || sorrycount) && uid < 0)
796    {
797      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
798      tree stmt;
799
800      stmt = build1 (LABEL_EXPR, void_type_node, dest);
801      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
802      uid = LABEL_DECL_UID (dest);
803    }
804  if (VARRAY_SIZE (ifun->cfg->x_label_to_block_map) <= (unsigned int)uid)
805    return NULL;
806  return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
807}
808
809/* Create edges for a goto statement at block BB.  */
810
811static void
812make_goto_expr_edges (basic_block bb)
813{
814  tree goto_t;
815  basic_block target_bb;
816  int for_call;
817  block_stmt_iterator last = bsi_last (bb);
818
819  goto_t = bsi_stmt (last);
820
821  /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
822     CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
823     from a nonlocal goto.  */
824  if (TREE_CODE (goto_t) != GOTO_EXPR)
825    for_call = 1;
826  else
827    {
828      tree dest = GOTO_DESTINATION (goto_t);
829      for_call = 0;
830
831      /* A GOTO to a local label creates normal edges.  */
832      if (simple_goto_p (goto_t))
833	{
834	  edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
835#ifdef USE_MAPPED_LOCATION
836	  e->goto_locus = EXPR_LOCATION (goto_t);
837#else
838	  e->goto_locus = EXPR_LOCUS (goto_t);
839#endif
840	  bsi_remove (&last);
841	  return;
842	}
843
844      /* Nothing more to do for nonlocal gotos.  */
845      if (TREE_CODE (dest) == LABEL_DECL)
846	return;
847
848      /* Computed gotos remain.  */
849    }
850
851  /* Look for the block starting with the destination label.  In the
852     case of a computed goto, make an edge to any label block we find
853     in the CFG.  */
854  FOR_EACH_BB (target_bb)
855    {
856      block_stmt_iterator bsi;
857
858      for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
859	{
860	  tree target = bsi_stmt (bsi);
861
862	  if (TREE_CODE (target) != LABEL_EXPR)
863	    break;
864
865	  if (
866	      /* Computed GOTOs.  Make an edge to every label block that has
867		 been marked as a potential target for a computed goto.  */
868	      (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
869	      /* Nonlocal GOTO target.  Make an edge to every label block
870		 that has been marked as a potential target for a nonlocal
871		 goto.  */
872	      || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
873	    {
874	      make_edge (bb, target_bb, EDGE_ABNORMAL);
875	      break;
876	    }
877	}
878    }
879
880  /* Degenerate case of computed goto with no labels.  */
881  if (!for_call && EDGE_COUNT (bb->succs) == 0)
882    make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
883}
884
885
886/*---------------------------------------------------------------------------
887			       Flowgraph analysis
888---------------------------------------------------------------------------*/
889
/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We only run this pass once; running it more than once is probably not
   profitable.  */
896
897/* A map from basic block index to the leading label of that block.  */
898static tree *label_for_bb;
899
900/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
901static void
902update_eh_label (struct eh_region *region)
903{
904  tree old_label = get_eh_region_tree_label (region);
905  if (old_label)
906    {
907      tree new_label;
908      basic_block bb = label_to_block (old_label);
909
910      /* ??? After optimizing, there may be EH regions with labels
911	 that have already been removed from the function body, so
912	 there is no basic block for them.  */
913      if (! bb)
914	return;
915
916      new_label = label_for_bb[bb->index];
917      set_eh_region_tree_label (region, new_label);
918    }
919}
920
/* Given LABEL, return the first label in the same basic block.  */
922static tree
923main_block_label (tree label)
924{
925  basic_block bb = label_to_block (label);
926
  /* label_to_block may have inserted an undefined label into the chain.  */
928  if (!label_for_bb[bb->index])
929    label_for_bb[bb->index] = label;
930  return label_for_bb[bb->index];
931}
932
/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */
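
/* For example, if a block begins with the labels L1, L2 and L3 and L1 is
   chosen as its leading label, then every jump to L2 or L3 is redirected
   to L1, after which L2 and L3 are deleted (provided they are artificial
   and neither nonlocal nor address-taken).  */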
937
938void
939cleanup_dead_labels (void)
940{
941  basic_block bb;
942  label_for_bb = xcalloc (last_basic_block, sizeof (tree));
943
944  /* Find a suitable label for each block.  We use the first user-defined
945     label if there is one, or otherwise just the first label we see.  */
946  FOR_EACH_BB (bb)
947    {
948      block_stmt_iterator i;
949
950      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
951	{
952	  tree label, stmt = bsi_stmt (i);
953
954	  if (TREE_CODE (stmt) != LABEL_EXPR)
955	    break;
956
957	  label = LABEL_EXPR_LABEL (stmt);
958
959	  /* If we have not yet seen a label for the current block,
960	     remember this one and see if there are more labels.  */
961	  if (! label_for_bb[bb->index])
962	    {
963	      label_for_bb[bb->index] = label;
964	      continue;
965	    }
966
967	  /* If we did see a label for the current block already, but it
968	     is an artificially created label, replace it if the current
969	     label is a user defined label.  */
970	  if (! DECL_ARTIFICIAL (label)
971	      && DECL_ARTIFICIAL (label_for_bb[bb->index]))
972	    {
973	      label_for_bb[bb->index] = label;
974	      break;
975	    }
976	}
977    }
978
979  /* Now redirect all jumps/branches to the selected label.
980     First do so for each block ending in a control statement.  */
981  FOR_EACH_BB (bb)
982    {
983      tree stmt = last_stmt (bb);
984      if (!stmt)
985	continue;
986
987      switch (TREE_CODE (stmt))
988	{
989	case COND_EXPR:
990	  {
991	    tree true_branch, false_branch;
992
993	    true_branch = COND_EXPR_THEN (stmt);
994	    false_branch = COND_EXPR_ELSE (stmt);
995
996	    GOTO_DESTINATION (true_branch)
997	      = main_block_label (GOTO_DESTINATION (true_branch));
998	    GOTO_DESTINATION (false_branch)
999	      = main_block_label (GOTO_DESTINATION (false_branch));
1000
1001	    break;
1002	  }
1003
1004	case SWITCH_EXPR:
1005	  {
1006	    size_t i;
1007	    tree vec = SWITCH_LABELS (stmt);
1008	    size_t n = TREE_VEC_LENGTH (vec);
1009
1010	    /* Replace all destination labels.  */
1011	    for (i = 0; i < n; ++i)
1012	      {
1013		tree elt = TREE_VEC_ELT (vec, i);
1014		tree label = main_block_label (CASE_LABEL (elt));
1015		CASE_LABEL (elt) = label;
1016	      }
1017	    break;
1018	  }
1019
1020	/* We have to handle GOTO_EXPRs until they're removed, and we don't
1021	   remove them until after we've created the CFG edges.  */
1022	case GOTO_EXPR:
1023          if (! computed_goto_p (stmt))
1024	    {
1025	      GOTO_DESTINATION (stmt)
1026		= main_block_label (GOTO_DESTINATION (stmt));
1027	      break;
1028	    }
1029
1030	default:
1031	  break;
1032      }
1033    }
1034
1035  for_each_eh_region (update_eh_label);
1036
  /* Finally, purge dead labels.  User-defined labels, labels that can be
     the target of non-local gotos, and labels whose address has been taken
     are all preserved.  */
1040  FOR_EACH_BB (bb)
1041    {
1042      block_stmt_iterator i;
1043      tree label_for_this_bb = label_for_bb[bb->index];
1044
1045      if (! label_for_this_bb)
1046	continue;
1047
1048      for (i = bsi_start (bb); !bsi_end_p (i); )
1049	{
1050	  tree label, stmt = bsi_stmt (i);
1051
1052	  if (TREE_CODE (stmt) != LABEL_EXPR)
1053	    break;
1054
1055	  label = LABEL_EXPR_LABEL (stmt);
1056
1057	  if (label == label_for_this_bb
1058	      || ! DECL_ARTIFICIAL (label)
1059	      || DECL_NONLOCAL (label)
1060	      || FORCED_LABEL (label))
1061	    bsi_next (&i);
1062	  else
1063	    bsi_remove (&i);
1064	}
1065    }
1066
1067  free (label_for_bb);
1068}
1069
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
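
/* For example, three consecutive cases

       case 1: case 2: case 3: goto L1;

   that share a destination become the single range "case 1 ... 3", and
   any case whose destination is the same as the default label is simply
   dropped from the vector.  */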
1074
1075void
1076group_case_labels (void)
1077{
1078  basic_block bb;
1079
1080  FOR_EACH_BB (bb)
1081    {
1082      tree stmt = last_stmt (bb);
1083      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1084	{
1085	  tree labels = SWITCH_LABELS (stmt);
1086	  int old_size = TREE_VEC_LENGTH (labels);
1087	  int i, j, new_size = old_size;
1088	  tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1089 	  tree default_label;
1090
1091	  /* The default label is always the last case in a switch
1092	     statement after gimplification.  */
1093	  default_label = CASE_LABEL (default_case);
1094
1095	  /* Look for possible opportunities to merge cases.
1096	     Ignore the last element of the label vector because it
1097	     must be the default case.  */
1098          i = 0;
1099	  while (i < old_size - 1)
1100	    {
1101	      tree base_case, base_label, base_high;
1102	      base_case = TREE_VEC_ELT (labels, i);
1103
1104	      gcc_assert (base_case);
1105	      base_label = CASE_LABEL (base_case);
1106
1107	      /* Discard cases that have the same destination as the
1108		 default case.  */
1109	      if (base_label == default_label)
1110		{
1111		  TREE_VEC_ELT (labels, i) = NULL_TREE;
1112		  i++;
1113		  new_size--;
1114		  continue;
1115		}
1116
1117	      base_high = CASE_HIGH (base_case) ?
1118		CASE_HIGH (base_case) : CASE_LOW (base_case);
1119	      i++;
1120	      /* Try to merge case labels.  Break out when we reach the end
1121		 of the label vector or when we cannot merge the next case
1122		 label with the current one.  */
1123	      while (i < old_size - 1)
1124		{
1125		  tree merge_case = TREE_VEC_ELT (labels, i);
1126	          tree merge_label = CASE_LABEL (merge_case);
1127		  tree t = int_const_binop (PLUS_EXPR, base_high,
1128					    integer_one_node, 1);
1129
1130		  /* Merge the cases if they jump to the same place,
1131		     and their ranges are consecutive.  */
1132		  if (merge_label == base_label
1133		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
1134		    {
1135		      base_high = CASE_HIGH (merge_case) ?
1136			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1137		      CASE_HIGH (base_case) = base_high;
1138		      TREE_VEC_ELT (labels, i) = NULL_TREE;
1139		      new_size--;
1140		      i++;
1141		    }
1142		  else
1143		    break;
1144		}
1145	    }
1146
1147	  /* Compress the case labels in the label vector, and adjust the
1148	     length of the vector.  */
1149	  for (i = 0, j = 0; i < new_size; i++)
1150	    {
1151	      while (! TREE_VEC_ELT (labels, j))
1152		j++;
1153	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1154	    }
1155	  TREE_VEC_LENGTH (labels) = new_size;
1156	}
1157    }
1158}
1159
1160/* Checks whether we can merge block B into block A.  */
1161
1162static bool
1163tree_can_merge_blocks_p (basic_block a, basic_block b)
1164{
1165  tree stmt;
1166  block_stmt_iterator bsi;
1167  tree phi;
1168
1169  if (!single_succ_p (a))
1170    return false;
1171
1172  if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1173    return false;
1174
1175  if (single_succ (a) != b)
1176    return false;
1177
1178  if (!single_pred_p (b))
1179    return false;
1180
1181  if (b == EXIT_BLOCK_PTR)
1182    return false;
1183
1184  /* If A ends by a statement causing exceptions or something similar, we
1185     cannot merge the blocks.  */
1186  stmt = last_stmt (a);
1187  if (stmt && stmt_ends_bb_p (stmt))
1188    return false;
1189
1190  /* Do not allow a block with only a non-local label to be merged.  */
1191  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1192      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1193    return false;
1194
1195  /* It must be possible to eliminate all phi nodes in B.  If ssa form
1196     is not up-to-date, we cannot eliminate any phis.  */
1197  phi = phi_nodes (b);
1198  if (phi)
1199    {
1200      if (need_ssa_update_p ())
1201	return false;
1202
1203      for (; phi; phi = PHI_CHAIN (phi))
1204	if (!is_gimple_reg (PHI_RESULT (phi))
1205	    && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1206	  return false;
1207    }
1208
1209  /* Do not remove user labels.  */
1210  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1211    {
1212      stmt = bsi_stmt (bsi);
1213      if (TREE_CODE (stmt) != LABEL_EXPR)
1214	break;
1215      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1216	return false;
1217    }
1218
1219  /* Protect the loop latches.  */
1220  if (current_loops
1221      && b->loop_father->latch == b)
1222    return false;
1223
1224  return true;
1225}
1226
1227/* Replaces all uses of NAME by VAL.  */
1228
1229void
1230replace_uses_by (tree name, tree val)
1231{
1232  imm_use_iterator imm_iter;
1233  use_operand_p use;
1234  tree stmt;
1235  edge e;
1236  unsigned i;
1237  VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1238
1239  FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1240    {
1241      stmt = USE_STMT (use);
1242      replace_exp (use, val);
1243
1244      if (TREE_CODE (stmt) == PHI_NODE)
1245	{
1246	  e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1247	  if (e->flags & EDGE_ABNORMAL)
1248	    {
	      /* This can only occur for virtual operands, since
		 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		 would prevent replacement.  */
1252	      gcc_assert (!is_gimple_reg (name));
1253	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1254	    }
1255	}
1256      else
1257	VEC_safe_push (tree, heap, stmts, stmt);
1258    }
1259
  /* We do not update the statements in the loop above.  Consider
     x = w * w;

     If we performed the update in the first loop, the statement
     would be rescanned after the first occurrence of w is replaced;
     the new uses would be placed at the beginning of the list,
     and we would never process them.  */
1267  for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1268    {
1269      tree rhs;
1270
1271      fold_stmt_inplace (stmt);
1272
1273      rhs = get_rhs (stmt);
1274      if (TREE_CODE (rhs) == ADDR_EXPR)
1275	recompute_tree_invarant_for_addr_expr (rhs);
1276
1277      maybe_clean_or_replace_eh_stmt (stmt, stmt);
1278      mark_new_vars_to_rename (stmt);
1279    }
1280
1281  VEC_free (tree, heap, stmts);
1282
1283  /* Also update the trees stored in loop structures.  */
1284  if (current_loops)
1285    {
1286      struct loop *loop;
1287
1288      for (i = 0; i < current_loops->num; i++)
1289	{
1290	  loop = current_loops->parray[i];
1291	  if (loop)
1292	    substitute_in_loop_info (loop, name, val);
1293	}
1294    }
1295}
1296
1297/* Merge block B into block A.  */
1298
1299static void
1300tree_merge_blocks (basic_block a, basic_block b)
1301{
1302  block_stmt_iterator bsi;
1303  tree_stmt_iterator last;
1304  tree phi;
1305
1306  if (dump_file)
1307    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1308
1309  /* Remove all single-valued PHI nodes from block B of the form
1310     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
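  /* E.g. a degenerate "V_2 = PHI <V_1>" in B is removed and every use
     of V_2 is rewritten to use V_1; when that replacement is not safe,
     a copy "V_2 = V_1" is emitted at the end of A instead.  */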
1311  bsi = bsi_last (a);
1312  for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1313    {
1314      tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1315      tree copy;
1316      bool may_replace_uses = may_propagate_copy (def, use);
1317
1318      /* In case we have loops to care about, do not propagate arguments of
1319	 loop closed ssa phi nodes.  */
1320      if (current_loops
1321	  && is_gimple_reg (def)
1322	  && TREE_CODE (use) == SSA_NAME
1323	  && a->loop_father != b->loop_father)
1324	may_replace_uses = false;
1325
1326      if (!may_replace_uses)
1327	{
1328	  gcc_assert (is_gimple_reg (def));
1329
1330	  /* Note that just emitting the copies is fine -- there is no problem
1331	     with ordering of phi nodes.  This is because A is the single
1332	     predecessor of B, therefore results of the phi nodes cannot
1333	     appear as arguments of the phi nodes.  */
1334	  copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1335	  bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1336	  SET_PHI_RESULT (phi, NULL_TREE);
1337	  SSA_NAME_DEF_STMT (def) = copy;
1338	}
1339      else
1340	replace_uses_by (def, use);
1341
1342      remove_phi_node (phi, NULL);
1343    }
1344
1345  /* Ensure that B follows A.  */
1346  move_block_after (b, a);
1347
1348  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1349  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1350
1351  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
1352  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1353    {
1354      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1355	{
1356	  tree label = bsi_stmt (bsi);
1357
1358	  bsi_remove (&bsi);
	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
1365	  if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1366	    {
1367	      block_stmt_iterator dest_bsi = bsi_start (a);
1368	      bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1369	    }
1370	}
1371      else
1372	{
1373	  set_bb_for_stmt (bsi_stmt (bsi), a);
1374	  bsi_next (&bsi);
1375	}
1376    }
1377
1378  /* Merge the chains.  */
1379  last = tsi_last (a->stmt_list);
1380  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1381  b->stmt_list = NULL;
1382}
1383
1384
/* Return the one of the two successors of BB that is not reached by a
   complex edge, if there is one.  Else, return BB.  We use this in
   optimizations that use post-dominators for their heuristics, to
   catch the cases in C++ where function calls are involved.  */
1389
1390basic_block
1391single_noncomplex_succ (basic_block bb)
1392{
1393  edge e0, e1;
1394  if (EDGE_COUNT (bb->succs) != 2)
1395    return bb;
1396
1397  e0 = EDGE_SUCC (bb, 0);
1398  e1 = EDGE_SUCC (bb, 1);
1399  if (e0->flags & EDGE_COMPLEX)
1400    return e1->dest;
1401  if (e1->flags & EDGE_COMPLEX)
1402    return e0->dest;
1403
1404  return bb;
1405}
1406
1407
1408
1409/* Walk the function tree removing unnecessary statements.
1410
1411     * Empty statement nodes are removed
1412
1413     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1414
1415     * Unnecessary COND_EXPRs are removed
1416
1417     * Some unnecessary BIND_EXPRs are removed
1418
1419   Clearly more work could be done.  The trick is doing the analysis
1420   and removal fast enough to be a net improvement in compile times.
1421
   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
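
/* For example,

       if (1) { x = 1; } else { y = 2; }

   reduces to just "x = 1;" as long as the else arm contains no reachable
   label, and a TRY_FINALLY whose body can neither throw nor branch is
   flattened into the body followed by its cleanup.  */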
1425
1426struct rus_data
1427{
1428  tree *last_goto;
1429  bool repeat;
1430  bool may_throw;
1431  bool may_branch;
1432  bool has_label;
1433};
1434
1435static void remove_useless_stmts_1 (tree *, struct rus_data *);
1436
1437static bool
1438remove_useless_stmts_warn_notreached (tree stmt)
1439{
1440  if (EXPR_HAS_LOCATION (stmt))
1441    {
1442      location_t loc = EXPR_LOCATION (stmt);
1443      if (LOCATION_LINE (loc) > 0)
1444	{
1445	  warning (0, "%Hwill never be executed", &loc);
1446	  return true;
1447	}
1448    }
1449
1450  switch (TREE_CODE (stmt))
1451    {
1452    case STATEMENT_LIST:
1453      {
1454	tree_stmt_iterator i;
1455	for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1456	  if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1457	    return true;
1458      }
1459      break;
1460
1461    case COND_EXPR:
1462      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1463	return true;
1464      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1465	return true;
1466      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1467	return true;
1468      break;
1469
1470    case TRY_FINALLY_EXPR:
1471    case TRY_CATCH_EXPR:
1472      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1473	return true;
1474      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1475	return true;
1476      break;
1477
1478    case CATCH_EXPR:
1479      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1480    case EH_FILTER_EXPR:
1481      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1482    case BIND_EXPR:
1483      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1484
1485    default:
1486      /* Not a live container.  */
1487      break;
1488    }
1489
1490  return false;
1491}
1492
1493static void
1494remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1495{
1496  tree then_clause, else_clause, cond;
1497  bool save_has_label, then_has_label, else_has_label;
1498
1499  save_has_label = data->has_label;
1500  data->has_label = false;
1501  data->last_goto = NULL;
1502
1503  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1504
1505  then_has_label = data->has_label;
1506  data->has_label = false;
1507  data->last_goto = NULL;
1508
1509  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1510
1511  else_has_label = data->has_label;
1512  data->has_label = save_has_label | then_has_label | else_has_label;
1513
1514  then_clause = COND_EXPR_THEN (*stmt_p);
1515  else_clause = COND_EXPR_ELSE (*stmt_p);
1516  cond = fold (COND_EXPR_COND (*stmt_p));
1517
1518  /* If neither arm does anything at all, we can remove the whole IF.  */
1519  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1520    {
1521      *stmt_p = build_empty_stmt ();
1522      data->repeat = true;
1523    }
1524
1525  /* If there are no reachable statements in an arm, then we can
1526     zap the entire conditional.  */
1527  else if (integer_nonzerop (cond) && !else_has_label)
1528    {
1529      if (warn_notreached)
1530	remove_useless_stmts_warn_notreached (else_clause);
1531      *stmt_p = then_clause;
1532      data->repeat = true;
1533    }
1534  else if (integer_zerop (cond) && !then_has_label)
1535    {
1536      if (warn_notreached)
1537	remove_useless_stmts_warn_notreached (then_clause);
1538      *stmt_p = else_clause;
1539      data->repeat = true;
1540    }
1541
1542  /* Check a couple of simple things on then/else with single stmts.  */
1543  else
1544    {
1545      tree then_stmt = expr_only (then_clause);
1546      tree else_stmt = expr_only (else_clause);
1547
1548      /* Notice branches to a common destination.  */
1549      if (then_stmt && else_stmt
1550	  && TREE_CODE (then_stmt) == GOTO_EXPR
1551	  && TREE_CODE (else_stmt) == GOTO_EXPR
1552	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1553	{
1554	  *stmt_p = then_stmt;
1555	  data->repeat = true;
1556	}
1557
1558      /* If the THEN/ELSE clause merely assigns a value to a variable or
1559	 parameter which is already known to contain that value, then
1560	 remove the useless THEN/ELSE clause.  */
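      /* For instance, in "if (x) ... else x = 0;" the else arm is
	 useless: x is already known to be zero on that path.  The
	 EQ_EXPR/NE_EXPR case below is analogous.  */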
1561      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1562	{
1563	  if (else_stmt
1564	      && TREE_CODE (else_stmt) == MODIFY_EXPR
1565	      && TREE_OPERAND (else_stmt, 0) == cond
1566	      && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1567	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1568	}
1569      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1570	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1571		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1572	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1573	{
1574	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
1575		       ? then_stmt : else_stmt);
1576	  tree *location = (TREE_CODE (cond) == EQ_EXPR
1577			    ? &COND_EXPR_THEN (*stmt_p)
1578			    : &COND_EXPR_ELSE (*stmt_p));
1579
1580	  if (stmt
1581	      && TREE_CODE (stmt) == MODIFY_EXPR
1582	      && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1583	      && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1584	    *location = alloc_stmt_list ();
1585	}
1586    }
1587
1588  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
1589     would be re-introduced during lowering.  */
1590  data->last_goto = NULL;
1591}
1592
1593
1594static void
1595remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1596{
1597  bool save_may_branch, save_may_throw;
1598  bool this_may_branch, this_may_throw;
1599
1600  /* Collect may_branch and may_throw information for the body only.  */
1601  save_may_branch = data->may_branch;
1602  save_may_throw = data->may_throw;
1603  data->may_branch = false;
1604  data->may_throw = false;
1605  data->last_goto = NULL;
1606
1607  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1608
1609  this_may_branch = data->may_branch;
1610  this_may_throw = data->may_throw;
1611  data->may_branch |= save_may_branch;
1612  data->may_throw |= save_may_throw;
1613  data->last_goto = NULL;
1614
1615  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1616
1617  /* If the body is empty, then we can emit the FINALLY block without
1618     the enclosing TRY_FINALLY_EXPR.  */
1619  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1620    {
1621      *stmt_p = TREE_OPERAND (*stmt_p, 1);
1622      data->repeat = true;
1623    }
1624
1625  /* If the handler is empty, then we can emit the TRY block without
1626     the enclosing TRY_FINALLY_EXPR.  */
1627  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1628    {
1629      *stmt_p = TREE_OPERAND (*stmt_p, 0);
1630      data->repeat = true;
1631    }
1632
1633  /* If the body neither throws, nor branches, then we can safely
1634     string the TRY and FINALLY blocks together.  */
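  /* E.g. "try { a = b; } finally { cleanup (); }" simply becomes
     "a = b; cleanup ();" in that case.  */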
1635  else if (!this_may_branch && !this_may_throw)
1636    {
1637      tree stmt = *stmt_p;
1638      *stmt_p = TREE_OPERAND (stmt, 0);
1639      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1640      data->repeat = true;
1641    }
1642}
1643
1644
1645static void
1646remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1647{
1648  bool save_may_throw, this_may_throw;
1649  tree_stmt_iterator i;
1650  tree stmt;
1651
1652  /* Collect may_throw information for the body only.  */
1653  save_may_throw = data->may_throw;
1654  data->may_throw = false;
1655  data->last_goto = NULL;
1656
1657  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1658
1659  this_may_throw = data->may_throw;
1660  data->may_throw = save_may_throw;
1661
1662  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
1663  if (!this_may_throw)
1664    {
1665      if (warn_notreached)
1666	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1667      *stmt_p = TREE_OPERAND (*stmt_p, 0);
1668      data->repeat = true;
1669      return;
1670    }
1671
1672  /* Process the catch clause specially.  We may be able to tell that
1673     no exceptions propagate past this point.  */
1674
1675  this_may_throw = true;
1676  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1677  stmt = tsi_stmt (i);
1678  data->last_goto = NULL;
1679
1680  switch (TREE_CODE (stmt))
1681    {
1682    case CATCH_EXPR:
1683      for (; !tsi_end_p (i); tsi_next (&i))
1684	{
1685	  stmt = tsi_stmt (i);
1686	  /* If we catch all exceptions, then the body does not
1687	     propagate exceptions past this point.  */
1688	  if (CATCH_TYPES (stmt) == NULL)
1689	    this_may_throw = false;
1690	  data->last_goto = NULL;
1691	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1692	}
1693      break;
1694
1695    case EH_FILTER_EXPR:
1696      if (EH_FILTER_MUST_NOT_THROW (stmt))
1697	this_may_throw = false;
1698      else if (EH_FILTER_TYPES (stmt) == NULL)
1699	this_may_throw = false;
1700      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1701      break;
1702
1703    default:
1704      /* Otherwise this is a cleanup.  */
1705      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1706
1707      /* If the cleanup is empty, then we can emit the TRY block without
1708	 the enclosing TRY_CATCH_EXPR.  */
1709      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1710	{
1711	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
1712	  data->repeat = true;
1713	}
1714      break;
1715    }
1716  data->may_throw |= this_may_throw;
1717}
1718
1719
1720static void
1721remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1722{
1723  tree block;
1724
1725  /* First remove anything underneath the BIND_EXPR.  */
1726  remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1727
1728  /* If the BIND_EXPR has no variables, then we can pull everything
1729     up one level and remove the BIND_EXPR, unless this is the toplevel
1730     BIND_EXPR for the current function or an inlined function.
1731
1732     When this situation occurs we will want to apply this
1733     optimization again.  */
1734  block = BIND_EXPR_BLOCK (*stmt_p);
1735  if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1736      && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1737      && (! block
1738	  || ! BLOCK_ABSTRACT_ORIGIN (block)
1739	  || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1740	      != FUNCTION_DECL)))
1741    {
1742      *stmt_p = BIND_EXPR_BODY (*stmt_p);
1743      data->repeat = true;
1744    }
1745}
1746
1747
1748static void
1749remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1750{
1751  tree dest = GOTO_DESTINATION (*stmt_p);
1752
1753  data->may_branch = true;
1754  data->last_goto = NULL;
1755
1756  /* Record the last goto expr, so that we can delete it if unnecessary.  */
1757  if (TREE_CODE (dest) == LABEL_DECL)
1758    data->last_goto = stmt_p;
1759}
1760
1761
1762static void
1763remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1764{
1765  tree label = LABEL_EXPR_LABEL (*stmt_p);
1766
1767  data->has_label = true;
1768
1769  /* We do want to jump across non-local label receiver code.  */
1770  if (DECL_NONLOCAL (label))
1771    data->last_goto = NULL;
1772
1773  else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1774    {
1775      *data->last_goto = build_empty_stmt ();
1776      data->repeat = true;
1777    }
1778
1779  /* ??? Add something here to delete unused labels.  */
1780}
1781
1782
/* If CALL invokes a "const" or "pure" function, then clear TREE_SIDE_EFFECTS
   on the call itself.  This allows us to eliminate redundant or useless
   calls to "const" functions.

   The gimplifier already does the same operation, but we may only notice
   that functions are const or pure once their calls have been gimplified,
   so we need to update the flag here.  */
1790
1791static void
1792update_call_expr_flags (tree call)
1793{
1794  tree decl = get_callee_fndecl (call);
1795  if (!decl)
1796    return;
1797  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1798    TREE_SIDE_EFFECTS (call) = 0;
1799  if (TREE_NOTHROW (decl))
1800    TREE_NOTHROW (call) = 1;
1801}
1802
1803
1804/* T is CALL_EXPR.  Set current_function_calls_* flags.  */
1805
1806void
1807notice_special_calls (tree t)
1808{
1809  int flags = call_expr_flags (t);
1810
1811  if (flags & ECF_MAY_BE_ALLOCA)
1812    current_function_calls_alloca = true;
1813  if (flags & ECF_RETURNS_TWICE)
1814    current_function_calls_setjmp = true;
1815}
1816
1817
1818/* Clear flags set by notice_special_calls.  Used by dead code removal
1819   to update the flags.  */
1820
1821void
1822clear_special_calls (void)
1823{
1824  current_function_calls_alloca = false;
1825  current_function_calls_setjmp = false;
1826}
1827
1828
1829static void
1830remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1831{
1832  tree t = *tp, op;
1833
1834  switch (TREE_CODE (t))
1835    {
1836    case COND_EXPR:
1837      remove_useless_stmts_cond (tp, data);
1838      break;
1839
1840    case TRY_FINALLY_EXPR:
1841      remove_useless_stmts_tf (tp, data);
1842      break;
1843
1844    case TRY_CATCH_EXPR:
1845      remove_useless_stmts_tc (tp, data);
1846      break;
1847
1848    case BIND_EXPR:
1849      remove_useless_stmts_bind (tp, data);
1850      break;
1851
1852    case GOTO_EXPR:
1853      remove_useless_stmts_goto (tp, data);
1854      break;
1855
1856    case LABEL_EXPR:
1857      remove_useless_stmts_label (tp, data);
1858      break;
1859
1860    case RETURN_EXPR:
1861      fold_stmt (tp);
1862      data->last_goto = NULL;
1863      data->may_branch = true;
1864      break;
1865
1866    case CALL_EXPR:
1867      fold_stmt (tp);
1868      data->last_goto = NULL;
1869      notice_special_calls (t);
1870      update_call_expr_flags (t);
1871      if (tree_could_throw_p (t))
1872	data->may_throw = true;
1873      break;
1874
1875    case MODIFY_EXPR:
1876      data->last_goto = NULL;
1877      fold_stmt (tp);
1878      op = get_call_expr_in (t);
1879      if (op)
1880	{
1881	  update_call_expr_flags (op);
1882	  notice_special_calls (op);
1883	}
1884      if (tree_could_throw_p (t))
1885	data->may_throw = true;
1886      break;
1887
1888    case STATEMENT_LIST:
1889      {
1890	tree_stmt_iterator i = tsi_start (t);
1891	while (!tsi_end_p (i))
1892	  {
1893	    t = tsi_stmt (i);
1894	    if (IS_EMPTY_STMT (t))
1895	      {
1896		tsi_delink (&i);
1897		continue;
1898	      }
1899
1900	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1901
1902	    t = tsi_stmt (i);
1903	    if (TREE_CODE (t) == STATEMENT_LIST)
1904	      {
1905		tsi_link_before (&i, t, TSI_SAME_STMT);
1906		tsi_delink (&i);
1907	      }
1908	    else
1909	      tsi_next (&i);
1910	  }
1911      }
1912      break;
1913    case ASM_EXPR:
1914      fold_stmt (tp);
1915      data->last_goto = NULL;
1916      break;
1917
1918    default:
1919      data->last_goto = NULL;
1920      break;
1921    }
1922}
1923
1924static void
1925remove_useless_stmts (void)
1926{
1927  struct rus_data data;
1928
1929  clear_special_calls ();
1930
1931  do
1932    {
1933      memset (&data, 0, sizeof (data));
1934      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1935    }
1936  while (data.repeat);
1937}
1938
1939
1940struct tree_opt_pass pass_remove_useless_stmts =
1941{
1942  "useless",				/* name */
1943  NULL,					/* gate */
1944  remove_useless_stmts,			/* execute */
1945  NULL,					/* sub */
1946  NULL,					/* next */
1947  0,					/* static_pass_number */
1948  0,					/* tv_id */
1949  PROP_gimple_any,			/* properties_required */
1950  0,					/* properties_provided */
1951  0,					/* properties_destroyed */
1952  0,					/* todo_flags_start */
1953  TODO_dump_func,			/* todo_flags_finish */
1954  0					/* letter */
1955};
1956
1957/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
1958
1959static void
1960remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1961{
1962  tree phi;
1963
1964  /* Since this block is no longer reachable, we can just delete all
1965     of its PHI nodes.  */
1966  phi = phi_nodes (bb);
1967  while (phi)
1968    {
1969      tree next = PHI_CHAIN (phi);
1970      remove_phi_node (phi, NULL_TREE);
1971      phi = next;
1972    }
1973
1974  /* Remove edges to BB's successors.  */
1975  while (EDGE_COUNT (bb->succs) > 0)
1976    remove_edge (EDGE_SUCC (bb, 0));
1977}
1978
1979
1980/* Remove statements of basic block BB.  */
1981
1982static void
1983remove_bb (basic_block bb)
1984{
1985  block_stmt_iterator i;
1986#ifdef USE_MAPPED_LOCATION
1987  source_location loc = UNKNOWN_LOCATION;
1988#else
1989  source_locus loc = 0;
1990#endif
1991
1992  if (dump_file)
1993    {
1994      fprintf (dump_file, "Removing basic block %d\n", bb->index);
1995      if (dump_flags & TDF_DETAILS)
1996	{
1997	  dump_bb (bb, dump_file, 0);
1998	  fprintf (dump_file, "\n");
1999	}
2000    }
2001
2002  /* If we remove the header or the latch of a loop, mark the loop for
2003     removal by setting its header and latch to NULL.  */
2004  if (current_loops)
2005    {
2006      struct loop *loop = bb->loop_father;
2007
2008      if (loop->latch == bb
2009	  || loop->header == bb)
2010	{
2011	  loop->latch = NULL;
2012	  loop->header = NULL;
2013
2014	  /* Also clean up the information associated with the loop.  Updating
2015	     it would waste time.  More importantly, it may refer to SSA
2016	     names that were defined in other removed basic blocks -- these
2017	     SSA names are now removed and invalid.  */
2018	  free_numbers_of_iterations_estimates_loop (loop);
2019	}
2020    }
2021
2022  /* Remove all the instructions in the block.  */
2023  for (i = bsi_start (bb); !bsi_end_p (i);)
2024    {
2025      tree stmt = bsi_stmt (i);
2026      if (TREE_CODE (stmt) == LABEL_EXPR
2027          && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2028	      || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2029	{
2030	  basic_block new_bb;
2031	  block_stmt_iterator new_bsi;
2032
2033	  /* A non-reachable non-local label may still be referenced.
2034	     But it no longer needs to carry the extra semantics of
2035	     non-locality.  */
2036	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2037	    {
2038	      DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2039	      FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2040	    }
2041
2042	  new_bb = bb->prev_bb;
2043	  new_bsi = bsi_start (new_bb);
2044	  bsi_remove (&i);
2045	  bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2046	}
2047      else
2048        {
2049	  /* Release SSA definitions if we are in SSA.  Note that we
2050	     may be called when not in SSA.  For example,
2051	     final_cleanup calls this function via
2052	     cleanup_tree_cfg.  */
2053	  if (in_ssa_p)
2054	    release_defs (stmt);
2055
2056	  bsi_remove (&i);
2057	}
2058
2059      /* Don't warn for removed gotos.  Gotos are often removed due to
2060	 jump threading, thus resulting in bogus warnings.  Not great,
2061	 since this way we lose warnings for gotos in the original
2062	 program that are indeed unreachable.  */
2063      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2064	{
2065#ifdef USE_MAPPED_LOCATION
2066	  if (EXPR_HAS_LOCATION (stmt))
2067	    loc = EXPR_LOCATION (stmt);
2068#else
2069	  source_locus t;
2070	  t = EXPR_LOCUS (stmt);
2071	  if (t && LOCATION_LINE (*t) > 0)
2072	    loc = t;
2073#endif
2074	}
2075    }
2076
2077  /* If requested, give a warning that the first statement in the
2078     block is unreachable.  The loop above only records a location the
2079     first time it sees a statement that has one, so LOC refers to the
2080     start of the block.  */
2081#ifdef USE_MAPPED_LOCATION
2082  if (loc > BUILTINS_LOCATION)
2083    warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2084#else
2085  if (loc)
2086    warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2087#endif
2088
2089  remove_phi_nodes_and_edges_for_unreachable_block (bb);
2090}
2091
2092
2093/* Given a basic block BB ending with a COND_EXPR, SWITCH_EXPR or computed
2094   GOTO_EXPR, and a predicate VAL, return the edge that will be taken out
2095   of the block.  If VAL does not match a unique edge, NULL is returned.  */
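/* For instance, a propagation pass that has determined that the COND_EXPR
   ending BB always evaluates to zero could call
   find_taken_edge (bb, integer_zero_node) to obtain the false edge
   (illustrative usage only; see find_taken_edge_cond_expr below).  */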
2096
2097edge
2098find_taken_edge (basic_block bb, tree val)
2099{
2100  tree stmt;
2101
2102  stmt = last_stmt (bb);
2103
2104  gcc_assert (stmt);
2105  gcc_assert (is_ctrl_stmt (stmt));
2106  gcc_assert (val);
2107
2108  if (! is_gimple_min_invariant (val))
2109    return NULL;
2110
2111  if (TREE_CODE (stmt) == COND_EXPR)
2112    return find_taken_edge_cond_expr (bb, val);
2113
2114  if (TREE_CODE (stmt) == SWITCH_EXPR)
2115    return find_taken_edge_switch_expr (bb, val);
2116
2117  if (computed_goto_p (stmt))
2118    {
2119      /* Only optimize if the argument is a label; if the argument is
2120	 not a label then we cannot construct a proper CFG.
2121
2122         It may be the case that we only need to allow the LABEL_REF to
2123         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2124         appear inside a LABEL_EXPR just to be safe.  */
2125      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2126	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2127	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2128      return NULL;
2129    }
2130
2131  gcc_unreachable ();
2132}
2133
2134/* Given a constant value VAL and a basic block BB ending in a computed
2135   GOTO_EXPR, determine which of the outgoing edges will be taken out of
2136   the block.  Return NULL if the taken edge cannot be determined.  */
2137
2138static edge
2139find_taken_edge_computed_goto (basic_block bb, tree val)
2140{
2141  basic_block dest;
2142  edge e = NULL;
2143
2144  dest = label_to_block (val);
2145  if (dest)
2146    {
2147      e = find_edge (bb, dest);
2148      gcc_assert (e != NULL);
2149    }
2150
2151  return e;
2152}
2153
2154/* Given a constant value VAL and the entry block BB to a COND_EXPR
2155   statement, determine which of the two edges will be taken out of the
2156   block.  Return NULL if either edge may be taken.  */
2157
2158static edge
2159find_taken_edge_cond_expr (basic_block bb, tree val)
2160{
2161  edge true_edge, false_edge;
2162
2163  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2164
2165  gcc_assert (TREE_CODE (val) == INTEGER_CST);
2166  return (zero_p (val) ? false_edge : true_edge);
2167}
2168
2169/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2170   statement, determine which edge will be taken out of the block.  Return
2171   NULL if any edge may be taken.  */
2172
2173static edge
2174find_taken_edge_switch_expr (basic_block bb, tree val)
2175{
2176  tree switch_expr, taken_case;
2177  basic_block dest_bb;
2178  edge e;
2179
2180  switch_expr = last_stmt (bb);
2181  taken_case = find_case_label_for_value (switch_expr, val);
2182  dest_bb = label_to_block (CASE_LABEL (taken_case));
2183
2184  e = find_edge (bb, dest_bb);
2185  gcc_assert (e);
2186  return e;
2187}
2188
2189
2190/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2191   We can make optimal use here of the fact that the case labels are
2192   sorted: We can do a binary search for a case matching VAL.  */
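/* For example (hypothetical label vector): given the sorted labels
   { case 1:, case 5:, case 10 ... 20:, default: } and VAL == 12, the
   search below narrows LOW/HIGH onto the range label "case 10 ... 20:"
   and returns it; for VAL == 7 no label matches and the trailing
   default case is returned.  */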
2193
2194static tree
2195find_case_label_for_value (tree switch_expr, tree val)
2196{
2197  tree vec = SWITCH_LABELS (switch_expr);
2198  size_t low, high, n = TREE_VEC_LENGTH (vec);
2199  tree default_case = TREE_VEC_ELT (vec, n - 1);
2200
2201  for (low = -1, high = n - 1; high - low > 1; )
2202    {
2203      size_t i = (high + low) / 2;
2204      tree t = TREE_VEC_ELT (vec, i);
2205      int cmp;
2206
2207      /* Cache the result of comparing CASE_LOW and val.  */
2208      cmp = tree_int_cst_compare (CASE_LOW (t), val);
2209
2210      if (cmp > 0)
2211	high = i;
2212      else
2213	low = i;
2214
2215      if (CASE_HIGH (t) == NULL)
2216	{
2217	  /* A single-valued case label.  */
2218	  if (cmp == 0)
2219	    return t;
2220	}
2221      else
2222	{
2223	  /* A case range.  We can only handle integer ranges.  */
2224	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2225	    return t;
2226	}
2227    }
2228
2229  return default_case;
2230}
2231
2232
2233
2234
2235/*---------------------------------------------------------------------------
2236			      Debugging functions
2237---------------------------------------------------------------------------*/
2238
2239/* Dump tree-specific information of block BB to file OUTF.  */
2240
2241void
2242tree_dump_bb (basic_block bb, FILE *outf, int indent)
2243{
2244  dump_generic_bb (outf, bb, indent, TDF_VOPS);
2245}
2246
2247
2248/* Dump a basic block on stderr.  */
2249
2250void
2251debug_tree_bb (basic_block bb)
2252{
2253  dump_bb (bb, stderr, 0);
2254}
2255
2256
2257/* Dump basic block with index N on stderr.  */
2258
2259basic_block
2260debug_tree_bb_n (int n)
2261{
2262  debug_tree_bb (BASIC_BLOCK (n));
2263  return BASIC_BLOCK (n);
2264}
2265
2266
2267/* Dump the CFG on stderr.
2268
2269   FLAGS are the same used by the tree dumping functions
2270   (see TDF_* in tree.h).  */
2271
2272void
2273debug_tree_cfg (int flags)
2274{
2275  dump_tree_cfg (stderr, flags);
2276}
2277
2278
2279/* Dump the program showing basic block boundaries on the given FILE.
2280
2281   FLAGS are the same used by the tree dumping functions (see TDF_* in
2282   tree.h).  */
2283
2284void
2285dump_tree_cfg (FILE *file, int flags)
2286{
2287  if (flags & TDF_DETAILS)
2288    {
2289      const char *funcname
2290	= lang_hooks.decl_printable_name (current_function_decl, 2);
2291
2292      fputc ('\n', file);
2293      fprintf (file, ";; Function %s\n\n", funcname);
2294      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2295	       n_basic_blocks, n_edges, last_basic_block);
2296
2297      brief_dump_cfg (file);
2298      fprintf (file, "\n");
2299    }
2300
2301  if (flags & TDF_STATS)
2302    dump_cfg_stats (file);
2303
2304  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2305}
2306
2307
2308/* Dump CFG statistics on FILE.  */
2309
2310void
2311dump_cfg_stats (FILE *file)
2312{
2313  static long max_num_merged_labels = 0;
2314  unsigned long size, total = 0;
2315  long num_edges;
2316  basic_block bb;
2317  const char * const fmt_str   = "%-30s%-13s%12s\n";
2318  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2319  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2320  const char * const fmt_str_3 = "%-43s%11lu%c\n";
2321  const char *funcname
2322    = lang_hooks.decl_printable_name (current_function_decl, 2);
2323
2324
2325  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2326
2327  fprintf (file, "---------------------------------------------------------\n");
2328  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2329  fprintf (file, fmt_str, "", "  instances  ", "used ");
2330  fprintf (file, "---------------------------------------------------------\n");
2331
2332  size = n_basic_blocks * sizeof (struct basic_block_def);
2333  total += size;
2334  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2335	   SCALE (size), LABEL (size));
2336
2337  num_edges = 0;
2338  FOR_EACH_BB (bb)
2339    num_edges += EDGE_COUNT (bb->succs);
2340  size = num_edges * sizeof (struct edge_def);
2341  total += size;
2342  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2343
2344  fprintf (file, "---------------------------------------------------------\n");
2345  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2346	   LABEL (total));
2347  fprintf (file, "---------------------------------------------------------\n");
2348  fprintf (file, "\n");
2349
2350  if (cfg_stats.num_merged_labels > max_num_merged_labels)
2351    max_num_merged_labels = cfg_stats.num_merged_labels;
2352
2353  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2354	   cfg_stats.num_merged_labels, max_num_merged_labels);
2355
2356  fprintf (file, "\n");
2357}
2358
2359
2360/* Dump CFG statistics on stderr.  Keep extern so that it's always
2361   linked in the final executable.  */
2362
2363void
2364debug_cfg_stats (void)
2365{
2366  dump_cfg_stats (stderr);
2367}
2368
2369
2370/* Dump the flowgraph to a .vcg FILE.  */
2371
2372static void
2373tree_cfg2vcg (FILE *file)
2374{
2375  edge e;
2376  edge_iterator ei;
2377  basic_block bb;
2378  const char *funcname
2379    = lang_hooks.decl_printable_name (current_function_decl, 2);
2380
2381  /* Write the file header.  */
2382  fprintf (file, "graph: { title: \"%s\"\n", funcname);
2383  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2384  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2385
2386  /* Write blocks and edges.  */
2387  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2388    {
2389      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2390	       e->dest->index);
2391
2392      if (e->flags & EDGE_FAKE)
2393	fprintf (file, " linestyle: dotted priority: 10");
2394      else
2395	fprintf (file, " linestyle: solid priority: 100");
2396
2397      fprintf (file, " }\n");
2398    }
2399  fputc ('\n', file);
2400
2401  FOR_EACH_BB (bb)
2402    {
2403      enum tree_code head_code, end_code;
2404      const char *head_name, *end_name;
2405      int head_line = 0;
2406      int end_line = 0;
2407      tree first = first_stmt (bb);
2408      tree last = last_stmt (bb);
2409
2410      if (first)
2411	{
2412	  head_code = TREE_CODE (first);
2413	  head_name = tree_code_name[head_code];
2414	  head_line = get_lineno (first);
2415	}
2416      else
2417	head_name = "no-statement";
2418
2419      if (last)
2420	{
2421	  end_code = TREE_CODE (last);
2422	  end_name = tree_code_name[end_code];
2423	  end_line = get_lineno (last);
2424	}
2425      else
2426	end_name = "no-statement";
2427
2428      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2429	       bb->index, bb->index, head_name, head_line, end_name,
2430	       end_line);
2431
2432      FOR_EACH_EDGE (e, ei, bb->succs)
2433	{
2434	  if (e->dest == EXIT_BLOCK_PTR)
2435	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2436	  else
2437	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2438
2439	  if (e->flags & EDGE_FAKE)
2440	    fprintf (file, " priority: 10 linestyle: dotted");
2441	  else
2442	    fprintf (file, " priority: 100 linestyle: solid");
2443
2444	  fprintf (file, " }\n");
2445	}
2446
2447      if (bb->next_bb != EXIT_BLOCK_PTR)
2448	fputc ('\n', file);
2449    }
2450
2451  fputs ("}\n\n", file);
2452}
2453
2454
2455
2456/*---------------------------------------------------------------------------
2457			     Miscellaneous helpers
2458---------------------------------------------------------------------------*/
2459
2460/* Return true if T represents a stmt that always transfers control.  */
2461
2462bool
2463is_ctrl_stmt (tree t)
2464{
2465  return (TREE_CODE (t) == COND_EXPR
2466	  || TREE_CODE (t) == SWITCH_EXPR
2467	  || TREE_CODE (t) == GOTO_EXPR
2468	  || TREE_CODE (t) == RETURN_EXPR
2469	  || TREE_CODE (t) == RESX_EXPR);
2470}
2471
2472
2473/* Return true if T is a statement that may alter the flow of control
2474   (e.g., a call to a non-returning function).  */
2475
2476bool
2477is_ctrl_altering_stmt (tree t)
2478{
2479  tree call;
2480
2481  gcc_assert (t);
2482  call = get_call_expr_in (t);
2483  if (call)
2484    {
2485      /* A non-pure/const CALL_EXPR alters flow control if the current
2486	 function has nonlocal labels.  */
2487      if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2488	return true;
2489
2490      /* A CALL_EXPR also alters control flow if it does not return.  */
2491      if (call_expr_flags (call) & ECF_NORETURN)
2492	return true;
2493    }
2494
2495  /* If a statement can throw, it alters control flow.  */
2496  return tree_can_throw_internal (t);
2497}
2498
2499
2500/* Return true if T is a computed goto.  */
2501
2502bool
2503computed_goto_p (tree t)
2504{
2505  return (TREE_CODE (t) == GOTO_EXPR
2506	  && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2507}
2508
2509
2510/* Checks whether EXPR is a simple local goto.  */
2511
2512bool
2513simple_goto_p (tree expr)
2514{
2515  return (TREE_CODE (expr) == GOTO_EXPR
2516	  && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2517}
2518
2519
2520/* Return true if T should start a new basic block.  PREV_T is the
2521   statement preceding T.  It is used when T is a label or a case label.
2522   Labels should only start a new basic block if their previous statement
2523   wasn't a label.  Otherwise, a sequence of labels would generate
2524   unnecessary basic blocks that only contain a single label.  */
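/* For example (illustrative), in the sequence

	L1:
	L2:
	  x = 1;

   only the LABEL_EXPR for L1 starts a new block; L2 is merged into the
   same block (counted in cfg_stats.num_merged_labels), unless L2 itself
   is a nonlocal label or computed-goto target, or L1 is nonlocal.  */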
2525
2526static inline bool
2527stmt_starts_bb_p (tree t, tree prev_t)
2528{
2529  if (t == NULL_TREE)
2530    return false;
2531
2532  /* LABEL_EXPRs start a new basic block only if the preceding
2533     statement wasn't a label of the same type.  This prevents the
2534     creation of consecutive blocks that have nothing but a single
2535     label.  */
2536  if (TREE_CODE (t) == LABEL_EXPR)
2537    {
2538      /* Nonlocal and computed GOTO targets always start a new block.  */
2539      if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2540	  || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2541	return true;
2542
2543      if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2544	{
2545	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2546	    return true;
2547
2548	  cfg_stats.num_merged_labels++;
2549	  return false;
2550	}
2551      else
2552	return true;
2553    }
2554
2555  return false;
2556}
2557
2558
2559/* Return true if T should end a basic block.  */
2560
2561bool
2562stmt_ends_bb_p (tree t)
2563{
2564  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2565}
2566
2567
2568/* Add gotos that used to be represented implicitly in the CFG.  */
2569
2570void
2571disband_implicit_edges (void)
2572{
2573  basic_block bb;
2574  block_stmt_iterator last;
2575  edge e;
2576  edge_iterator ei;
2577  tree stmt, label;
2578
2579  FOR_EACH_BB (bb)
2580    {
2581      last = bsi_last (bb);
2582      stmt = last_stmt (bb);
2583
2584      if (stmt && TREE_CODE (stmt) == COND_EXPR)
2585	{
2586	  /* Remove superfluous gotos from COND_EXPR branches.  Moved
2587	     here from cfg_remove_useless_stmts since it violates the
2588	     invariants for tree--cfg correspondence and thus fits better
2589	     here, where we do it anyway.  */
2590	  e = find_edge (bb, bb->next_bb);
2591	  if (e)
2592	    {
2593	      if (e->flags & EDGE_TRUE_VALUE)
2594		COND_EXPR_THEN (stmt) = build_empty_stmt ();
2595	      else if (e->flags & EDGE_FALSE_VALUE)
2596		COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2597	      else
2598		gcc_unreachable ();
2599	      e->flags |= EDGE_FALLTHRU;
2600	    }
2601
2602	  continue;
2603	}
2604
2605      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2606	{
2607	  /* Remove the RETURN_EXPR if we may fall through to the exit
2608	     instead.  */
2609	  gcc_assert (single_succ_p (bb));
2610	  gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2611
2612	  if (bb->next_bb == EXIT_BLOCK_PTR
2613	      && !TREE_OPERAND (stmt, 0))
2614	    {
2615	      bsi_remove (&last);
2616	      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2617	    }
2618	  continue;
2619	}
2620
2621      /* There can be no fallthru edge if the last statement is a control
2622	 one.  */
2623      if (stmt && is_ctrl_stmt (stmt))
2624	continue;
2625
2626      /* Find a fallthru edge and emit the goto if necessary.  */
2627      FOR_EACH_EDGE (e, ei, bb->succs)
2628	if (e->flags & EDGE_FALLTHRU)
2629	  break;
2630
2631      if (!e || e->dest == bb->next_bb)
2632	continue;
2633
2634      gcc_assert (e->dest != EXIT_BLOCK_PTR);
2635      label = tree_block_label (e->dest);
2636
2637      stmt = build1 (GOTO_EXPR, void_type_node, label);
2638#ifdef USE_MAPPED_LOCATION
2639      SET_EXPR_LOCATION (stmt, e->goto_locus);
2640#else
2641      SET_EXPR_LOCUS (stmt, e->goto_locus);
2642#endif
2643      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2644      e->flags &= ~EDGE_FALLTHRU;
2645    }
2646}
2647
2648/* Remove block annotations and other data structures.  */
2649
2650void
2651delete_tree_cfg_annotations (void)
2652{
2653  label_to_block_map = NULL;
2654}
2655
2656
2657/* Return the first statement in basic block BB.  */
2658
2659tree
2660first_stmt (basic_block bb)
2661{
2662  block_stmt_iterator i = bsi_start (bb);
2663  return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2664}
2665
2666
2667/* Return the last statement in basic block BB.  */
2668
2669tree
2670last_stmt (basic_block bb)
2671{
2672  block_stmt_iterator b = bsi_last (bb);
2673  return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2674}
2675
2676
2677/* Return a pointer to the last statement in block BB.  */
2678
2679tree *
2680last_stmt_ptr (basic_block bb)
2681{
2682  block_stmt_iterator last = bsi_last (bb);
2683  return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2684}
2685
2686
2687/* Return the last statement of an otherwise empty block.  Return NULL
2688   if the block is totally empty, or if it contains more than one
2689   statement.  */
2690
2691tree
2692last_and_only_stmt (basic_block bb)
2693{
2694  block_stmt_iterator i = bsi_last (bb);
2695  tree last, prev;
2696
2697  if (bsi_end_p (i))
2698    return NULL_TREE;
2699
2700  last = bsi_stmt (i);
2701  bsi_prev (&i);
2702  if (bsi_end_p (i))
2703    return last;
2704
2705  /* Empty statements should no longer appear in the instruction stream.
2706     Everything that might have appeared before should be deleted by
2707     remove_useless_stmts, and the optimizers should just bsi_remove
2708     instead of smashing with build_empty_stmt.
2709
2710     Thus the only thing that should appear here in a block containing
2711     one executable statement is a label.  */
2712  prev = bsi_stmt (i);
2713  if (TREE_CODE (prev) == LABEL_EXPR)
2714    return last;
2715  else
2716    return NULL_TREE;
2717}
2718
2719
2720/* Mark BB as the basic block holding statement T.  */
2721
2722void
2723set_bb_for_stmt (tree t, basic_block bb)
2724{
2725  if (TREE_CODE (t) == PHI_NODE)
2726    PHI_BB (t) = bb;
2727  else if (TREE_CODE (t) == STATEMENT_LIST)
2728    {
2729      tree_stmt_iterator i;
2730      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2731	set_bb_for_stmt (tsi_stmt (i), bb);
2732    }
2733  else
2734    {
2735      stmt_ann_t ann = get_stmt_ann (t);
2736      ann->bb = bb;
2737
2738      /* If the statement is a label, add the label to block-to-labels map
2739	 so that we can speed up edge creation for GOTO_EXPRs.  */
2740      if (TREE_CODE (t) == LABEL_EXPR)
2741	{
2742	  int uid;
2743
2744	  t = LABEL_EXPR_LABEL (t);
2745	  uid = LABEL_DECL_UID (t);
2746	  if (uid == -1)
2747	    {
2748	      LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2749	      if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2750		VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2751	    }
2752	  else
2753	    /* We're moving an existing label.  Make sure that we've
2754		removed it from the old block.  */
2755	    gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2756	  VARRAY_BB (label_to_block_map, uid) = bb;
2757	}
2758    }
2759}
2760
2761/* Finds iterator for STMT.  */
2762
2763extern block_stmt_iterator
2764bsi_for_stmt (tree stmt)
2765{
2766  block_stmt_iterator bsi;
2767
2768  for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2769    if (bsi_stmt (bsi) == stmt)
2770      return bsi;
2771
2772  gcc_unreachable ();
2773}
2774
2775/* Update statement T, or each statement in list T, if marked modified.  */
2776static inline void
2777update_modified_stmts (tree t)
2778{
2779  if (TREE_CODE (t) == STATEMENT_LIST)
2780    {
2781      tree_stmt_iterator i;
2782      tree stmt;
2783      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2784        {
2785	  stmt = tsi_stmt (i);
2786	  update_stmt_if_modified (stmt);
2787	}
2788    }
2789  else
2790    update_stmt_if_modified (t);
2791}
2792
2793/* Insert statement (or statement list) T before the statement
2794   pointed-to by iterator I.  M specifies how to update iterator I
2795   after insertion (see enum bsi_iterator_update).  */
2796
2797void
2798bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2799{
2800  set_bb_for_stmt (t, i->bb);
2801  update_modified_stmts (t);
2802  tsi_link_before (&i->tsi, t, m);
2803}
2804
2805
2806/* Insert statement (or statement list) T after the statement
2807   pointed-to by iterator I.  M specifies how to update iterator I
2808   after insertion (see enum bsi_iterator_update).  */
2809
2810void
2811bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2812{
2813  set_bb_for_stmt (t, i->bb);
2814  update_modified_stmts (t);
2815  tsi_link_after (&i->tsi, t, m);
2816}
2817
2818
2819/* Remove the statement pointed to by iterator I.  The iterator is updated
2820   to the next statement.  */
2821
2822void
2823bsi_remove (block_stmt_iterator *i)
2824{
2825  tree t = bsi_stmt (*i);
2826  set_bb_for_stmt (t, NULL);
2827  delink_stmt_imm_use (t);
2828  tsi_delink (&i->tsi);
2829  mark_stmt_modified (t);
2830}
2831
2832
2833/* Move the statement at FROM so it comes right after the statement at TO.  */
2834
2835void
2836bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2837{
2838  tree stmt = bsi_stmt (*from);
2839  bsi_remove (from);
2840  bsi_insert_after (to, stmt, BSI_SAME_STMT);
2841}
2842
2843
2844/* Move the statement at FROM so it comes right before the statement at TO.  */
2845
2846void
2847bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2848{
2849  tree stmt = bsi_stmt (*from);
2850  bsi_remove (from);
2851  bsi_insert_before (to, stmt, BSI_SAME_STMT);
2852}
2853
2854
2855/* Move the statement at FROM to the end of basic block BB.  */
2856
2857void
2858bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2859{
2860  block_stmt_iterator last = bsi_last (bb);
2861
2862  /* Have to check bsi_end_p because it could be an empty block.  */
2863  if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2864    bsi_move_before (from, &last);
2865  else
2866    bsi_move_after (from, &last);
2867}
2868
2869
2870/* Replace the contents of the statement pointed to by iterator BSI
2871   with STMT.  If PRESERVE_EH_INFO is true, the exception handling
2872   information of the original statement is preserved.  */
2873
2874void
2875bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2876{
2877  int eh_region;
2878  tree orig_stmt = bsi_stmt (*bsi);
2879
2880  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2881  set_bb_for_stmt (stmt, bsi->bb);
2882
2883  /* Preserve EH region information from the original statement, if
2884     requested by the caller.  */
2885  if (preserve_eh_info)
2886    {
2887      eh_region = lookup_stmt_eh_region (orig_stmt);
2888      if (eh_region >= 0)
2889	add_stmt_to_eh_region (stmt, eh_region);
2890    }
2891
2892  delink_stmt_imm_use (orig_stmt);
2893  *bsi_stmt_ptr (*bsi) = stmt;
2894  mark_stmt_modified (stmt);
2895  update_modified_stmts (stmt);
2896}
2897
2898
2899/* Find a suitable location for inserting a statement on edge E and
2900   store it in *BSI.  Every attempt is made to place the statement in an
2901   existing basic block, but sometimes that isn't possible; in that case
2902   the edge is split and the insertion point is placed in the new block.
2903
2904   In all cases, the returned *BSI points to the correct location.  The
2905   return value is true if insertion should be done after the location,
2906   or false if it should be done before the location.  If a new basic
2907   block has to be created, it is stored in *NEW_BB.  */
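/* A caller typically uses the result like this (illustrative sketch,
   mirroring the callers below):

	if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
	  bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
	else
	  bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);  */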
2908
2909static bool
2910tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2911			   basic_block *new_bb)
2912{
2913  basic_block dest, src;
2914  tree tmp;
2915
2916  dest = e->dest;
2917 restart:
2918
2919  /* If the destination has one predecessor which has no PHI nodes,
2920     insert there.  Except for the exit block.
2921
2922     The requirement for no PHI nodes could be relaxed.  Basically we
2923     would have to examine the PHIs to prove that none of them used
2924     the value set by the statement we want to insert on E.  That
2925     hardly seems worth the effort.  */
2926  if (single_pred_p (dest)
2927      && ! phi_nodes (dest)
2928      && dest != EXIT_BLOCK_PTR)
2929    {
2930      *bsi = bsi_start (dest);
2931      if (bsi_end_p (*bsi))
2932	return true;
2933
2934      /* Make sure we insert after any leading labels.  */
2935      tmp = bsi_stmt (*bsi);
2936      while (TREE_CODE (tmp) == LABEL_EXPR)
2937	{
2938	  bsi_next (bsi);
2939	  if (bsi_end_p (*bsi))
2940	    break;
2941	  tmp = bsi_stmt (*bsi);
2942	}
2943
2944      if (bsi_end_p (*bsi))
2945	{
2946	  *bsi = bsi_last (dest);
2947	  return true;
2948	}
2949      else
2950	return false;
2951    }
2952
2953  /* If the source has one successor, the edge is not abnormal and
2954     the last statement does not end a basic block, insert there.
2955     Except for the entry block.  */
2956  src = e->src;
2957  if ((e->flags & EDGE_ABNORMAL) == 0
2958      && single_succ_p (src)
2959      && src != ENTRY_BLOCK_PTR)
2960    {
2961      *bsi = bsi_last (src);
2962      if (bsi_end_p (*bsi))
2963	return true;
2964
2965      tmp = bsi_stmt (*bsi);
2966      if (!stmt_ends_bb_p (tmp))
2967	return true;
2968
2969      /* Insert code just before returning the value.  We may need to decompose
2970         the return in case it contains a non-trivial operand.  */
2971      if (TREE_CODE (tmp) == RETURN_EXPR)
2972        {
2973	  tree op = TREE_OPERAND (tmp, 0);
2974	  if (op && !is_gimple_val (op))
2975	    {
2976	      gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
2977	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
2978	      TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
2979	    }
2980	  bsi_prev (bsi);
2981	  return true;
2982        }
2983    }
2984
2985  /* Otherwise, create a new basic block, and split this edge.  */
2986  dest = split_edge (e);
2987  if (new_bb)
2988    *new_bb = dest;
2989  e = single_pred_edge (dest);
2990  goto restart;
2991}
2992
2993
2994/* This routine will commit all pending edge insertions, creating any new
2995   basic blocks which are necessary.  */
2996
2997void
2998bsi_commit_edge_inserts (void)
2999{
3000  basic_block bb;
3001  edge e;
3002  edge_iterator ei;
3003
3004  bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3005
3006  FOR_EACH_BB (bb)
3007    FOR_EACH_EDGE (e, ei, bb->succs)
3008      bsi_commit_one_edge_insert (e, NULL);
3009}
3010
3011
3012/* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3013   to this block, otherwise set it to NULL.  */
3014
3015void
3016bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3017{
3018  if (new_bb)
3019    *new_bb = NULL;
3020  if (PENDING_STMT (e))
3021    {
3022      block_stmt_iterator bsi;
3023      tree stmt = PENDING_STMT (e);
3024
3025      PENDING_STMT (e) = NULL_TREE;
3026
3027      if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3028	bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3029      else
3030	bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3031    }
3032}
3033
3034
3035/* Add STMT to the pending list of edge E.  No actual insertion is
3036   made until a call to bsi_commit_edge_inserts () is made.  */
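/* A typical usage pattern (illustrative only) in a pass that queues
   statements on edges:

	bsi_insert_on_edge (e, stmt1);
	bsi_insert_on_edge (e, stmt2);
	...
	bsi_commit_edge_inserts ();

   The queued statements accumulate in PENDING_STMT (E) as a statement
   list and are only placed into the IL (possibly splitting E) when the
   commit function runs.  */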
3037
3038void
3039bsi_insert_on_edge (edge e, tree stmt)
3040{
3041  append_to_statement_list (stmt, &PENDING_STMT (e));
3042}
3043
3044/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
3045   block has to be created, it is returned.  */
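/* Note that E must not already have statements queued on it with
   bsi_insert_on_edge; the assert below enforces this.  */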
3046
3047basic_block
3048bsi_insert_on_edge_immediate (edge e, tree stmt)
3049{
3050  block_stmt_iterator bsi;
3051  basic_block new_bb = NULL;
3052
3053  gcc_assert (!PENDING_STMT (e));
3054
3055  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3056    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3057  else
3058    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3059
3060  return new_bb;
3061}
3062
3063/*---------------------------------------------------------------------------
3064	     Tree specific functions for CFG manipulation
3065---------------------------------------------------------------------------*/
3066
3067/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
3068
3069static void
3070reinstall_phi_args (edge new_edge, edge old_edge)
3071{
3072  tree var, phi;
3073
3074  if (!PENDING_STMT (old_edge))
3075    return;
3076
3077  for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3078       var && phi;
3079       var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3080    {
3081      tree result = TREE_PURPOSE (var);
3082      tree arg = TREE_VALUE (var);
3083
3084      gcc_assert (result == PHI_RESULT (phi));
3085
3086      add_phi_arg (phi, arg, new_edge);
3087    }
3088
3089  PENDING_STMT (old_edge) = NULL;
3090}
3091
3092/* Returns the basic block after which the new basic block created
3093   by splitting edge EDGE_IN should be placed.  Tries to keep the new block
3094   near its "logical" location.  This is of most help to humans looking
3095   at debugging dumps.  */
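/* Concretely: if the block immediately preceding DEST already has an edge
   to DEST, the new block is placed right after EDGE_IN->src so that it
   stays near the code it was split out of; otherwise the new block is
   placed immediately before DEST.  */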
3096
3097static basic_block
3098split_edge_bb_loc (edge edge_in)
3099{
3100  basic_block dest = edge_in->dest;
3101
3102  if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3103    return edge_in->src;
3104  else
3105    return dest->prev_bb;
3106}
3107
3108/* Split a (typically critical) edge EDGE_IN.  Return the new block.
3109   Abort on abnormal edges.  */
3110
3111static basic_block
3112tree_split_edge (edge edge_in)
3113{
3114  basic_block new_bb, after_bb, dest, src;
3115  edge new_edge, e;
3116
3117  /* Abnormal edges cannot be split.  */
3118  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3119
3120  src = edge_in->src;
3121  dest = edge_in->dest;
3122
3123  after_bb = split_edge_bb_loc (edge_in);
3124
3125  new_bb = create_empty_bb (after_bb);
3126  new_bb->frequency = EDGE_FREQUENCY (edge_in);
3127  new_bb->count = edge_in->count;
3128  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3129  new_edge->probability = REG_BR_PROB_BASE;
3130  new_edge->count = edge_in->count;
3131
3132  e = redirect_edge_and_branch (edge_in, new_bb);
3133  gcc_assert (e);
3134  reinstall_phi_args (new_edge, e);
3135
3136  return new_bb;
3137}
3138
3139
3140/* Return true when BB has label LABEL in it.  */
3141
3142static bool
3143has_label_p (basic_block bb, tree label)
3144{
3145  block_stmt_iterator bsi;
3146
3147  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3148    {
3149      tree stmt = bsi_stmt (bsi);
3150
3151      if (TREE_CODE (stmt) != LABEL_EXPR)
3152	return false;
3153      if (LABEL_EXPR_LABEL (stmt) == label)
3154	return true;
3155    }
3156  return false;
3157}
3158
3159
3160/* Callback for walk_tree; check that all elements with address taken are
3161   properly noticed as such.  DATA is non-null precisely when TP was
3162   reached from inside a PHI node.  */
3163
3164static tree
3165verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3166{
3167  tree t = *tp, x;
3168  bool in_phi = (data != NULL);
3169
3170  if (TYPE_P (t))
3171    *walk_subtrees = 0;
3172
3173  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
3174#define CHECK_OP(N, MSG) \
3175  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
3176       { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3177
3178  switch (TREE_CODE (t))
3179    {
3180    case SSA_NAME:
3181      if (SSA_NAME_IN_FREE_LIST (t))
3182	{
3183	  error ("SSA name in freelist but still referenced");
3184	  return *tp;
3185	}
3186      break;
3187
3188    case ASSERT_EXPR:
3189      x = fold (ASSERT_EXPR_COND (t));
3190      if (x == boolean_false_node)
3191	{
3192	  error ("ASSERT_EXPR with an always-false condition");
3193	  return *tp;
3194	}
3195      break;
3196
3197    case MODIFY_EXPR:
3198      x = TREE_OPERAND (t, 0);
3199      if (TREE_CODE (x) == BIT_FIELD_REF
3200	  && is_gimple_reg (TREE_OPERAND (x, 0)))
3201	{
3202	  error ("GIMPLE register modified with BIT_FIELD_REF");
3203	  return t;
3204	}
3205      break;
3206
3207    case ADDR_EXPR:
3208      {
3209	bool old_invariant;
3210	bool old_constant;
3211	bool old_side_effects;
3212	bool new_invariant;
3213	bool new_constant;
3214	bool new_side_effects;
3215
3216        /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3217	   dead PHIs that take the address of something.  But if the PHI
3218	   result is dead, the fact that it takes the address of anything
3219	   is irrelevant.  Because we cannot tell from here whether a PHI
3220	   result is dead, we just skip this check for PHIs altogether.  This
3221	   means we may be missing "valid" checks, but what can you do?
3222	   This was PR19217.  */
3223        if (in_phi)
3224	  break;
3225
3226	old_invariant = TREE_INVARIANT (t);
3227	old_constant = TREE_CONSTANT (t);
3228	old_side_effects = TREE_SIDE_EFFECTS (t);
3229
3230	recompute_tree_invarant_for_addr_expr (t);
3231	new_invariant = TREE_INVARIANT (t);
3232	new_side_effects = TREE_SIDE_EFFECTS (t);
3233	new_constant = TREE_CONSTANT (t);
3234
3235	if (old_invariant != new_invariant)
3236	  {
3237	    error ("invariant not recomputed when ADDR_EXPR changed");
3238	    return t;
3239	  }
3240
3241        if (old_constant != new_constant)
3242	  {
3243	    error ("constant not recomputed when ADDR_EXPR changed");
3244	    return t;
3245	  }
3246	if (old_side_effects != new_side_effects)
3247	  {
3248	    error ("side effects not recomputed when ADDR_EXPR changed");
3249	    return t;
3250	  }
3251
3252	/* Skip any references (they will be checked when we recurse down the
3253	   tree) and ensure that any variable used as a prefix is marked
3254	   addressable.  */
3255	for (x = TREE_OPERAND (t, 0);
3256	     handled_component_p (x);
3257	     x = TREE_OPERAND (x, 0))
3258	  ;
3259
3260	if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3261	  return NULL;
3262	if (!TREE_ADDRESSABLE (x))
3263	  {
3264	    error ("address taken, but ADDRESSABLE bit not set");
3265	    return x;
3266	  }
3267	break;
3268      }
3269
3270    case COND_EXPR:
3271      x = COND_EXPR_COND (t);
3272      if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3273	{
3274	  error ("non-boolean used in condition");
3275	  return x;
3276	}
3277      if (!is_gimple_condexpr (x))
3278        {
3279	  error ("invalid conditional operand");
3280	  return x;
3281	}
3282      break;
3283
3284    case NOP_EXPR:
3285    case CONVERT_EXPR:
3286    case FIX_TRUNC_EXPR:
3287    case FIX_CEIL_EXPR:
3288    case FIX_FLOOR_EXPR:
3289    case FIX_ROUND_EXPR:
3290    case FLOAT_EXPR:
3291    case NEGATE_EXPR:
3292    case ABS_EXPR:
3293    case BIT_NOT_EXPR:
3294    case NON_LVALUE_EXPR:
3295    case TRUTH_NOT_EXPR:
3296      CHECK_OP (0, "invalid operand to unary operator");
3297      break;
3298
3299    case REALPART_EXPR:
3300    case IMAGPART_EXPR:
3301    case COMPONENT_REF:
3302    case ARRAY_REF:
3303    case ARRAY_RANGE_REF:
3304    case BIT_FIELD_REF:
3305    case VIEW_CONVERT_EXPR:
3306      /* We have a nest of references.  Verify that each of the operands
3307	 that determine where to reference is either a constant or a variable,
3308	 verify that the base is valid, and then show we've already checked
3309	 the subtrees.  */
3310      while (handled_component_p (t))
3311	{
3312	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3313	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3314	  else if (TREE_CODE (t) == ARRAY_REF
3315		   || TREE_CODE (t) == ARRAY_RANGE_REF)
3316	    {
3317	      CHECK_OP (1, "invalid array index");
3318	      if (TREE_OPERAND (t, 2))
3319		CHECK_OP (2, "invalid array lower bound");
3320	      if (TREE_OPERAND (t, 3))
3321		CHECK_OP (3, "invalid array stride");
3322	    }
3323	  else if (TREE_CODE (t) == BIT_FIELD_REF)
3324	    {
3325	      CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3326	      CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3327	    }
3328
3329	  t = TREE_OPERAND (t, 0);
3330	}
3331
3332      if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3333	{
3334	  error ("invalid reference prefix");
3335	  return t;
3336	}
3337      *walk_subtrees = 0;
3338      break;
3339
3340    case LT_EXPR:
3341    case LE_EXPR:
3342    case GT_EXPR:
3343    case GE_EXPR:
3344    case EQ_EXPR:
3345    case NE_EXPR:
3346    case UNORDERED_EXPR:
3347    case ORDERED_EXPR:
3348    case UNLT_EXPR:
3349    case UNLE_EXPR:
3350    case UNGT_EXPR:
3351    case UNGE_EXPR:
3352    case UNEQ_EXPR:
3353    case LTGT_EXPR:
3354    case PLUS_EXPR:
3355    case MINUS_EXPR:
3356    case MULT_EXPR:
3357    case TRUNC_DIV_EXPR:
3358    case CEIL_DIV_EXPR:
3359    case FLOOR_DIV_EXPR:
3360    case ROUND_DIV_EXPR:
3361    case TRUNC_MOD_EXPR:
3362    case CEIL_MOD_EXPR:
3363    case FLOOR_MOD_EXPR:
3364    case ROUND_MOD_EXPR:
3365    case RDIV_EXPR:
3366    case EXACT_DIV_EXPR:
3367    case MIN_EXPR:
3368    case MAX_EXPR:
3369    case LSHIFT_EXPR:
3370    case RSHIFT_EXPR:
3371    case LROTATE_EXPR:
3372    case RROTATE_EXPR:
3373    case BIT_IOR_EXPR:
3374    case BIT_XOR_EXPR:
3375    case BIT_AND_EXPR:
3376      CHECK_OP (0, "invalid operand to binary operator");
3377      CHECK_OP (1, "invalid operand to binary operator");
3378      break;
3379
3380    default:
3381      break;
3382    }
3383  return NULL;
3384
3385#undef CHECK_OP
3386}
3387
3388
3389/* Verify STMT, return true if STMT is not in GIMPLE form.
3390   TODO: Implement type checking.  */
3391
3392static bool
3393verify_stmt (tree stmt, bool last_in_block)
3394{
3395  tree addr;
3396
3397  if (!is_gimple_stmt (stmt))
3398    {
3399      error ("is not a valid GIMPLE statement");
3400      goto fail;
3401    }
3402
3403  addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3404  if (addr)
3405    {
3406      debug_generic_stmt (addr);
3407      return true;
3408    }
3409
3410  /* If the statement is marked as part of an EH region, then it is
3411     expected that the statement could throw.  Verify that when we
3412     have optimizations that simplify statements such that we can prove
3413     they cannot throw, we also update the other data structures to
3414     match.  */
3415  if (lookup_stmt_eh_region (stmt) >= 0)
3416    {
3417      if (!tree_could_throw_p (stmt))
3418	{
3419	  error ("statement marked for throw, but doesn%'t");
3420	  goto fail;
3421	}
3422      if (!last_in_block && tree_can_throw_internal (stmt))
3423	{
3424	  error ("statement marked for throw in middle of block");
3425	  goto fail;
3426	}
3427    }
3428
3429  return false;
3430
3431 fail:
3432  debug_generic_stmt (stmt);
3433  return true;
3434}
3435
3436
3437/* Return true when T can be shared.  */
3438
3439static bool
3440tree_node_can_be_shared (tree t)
3441{
3442  if (IS_TYPE_OR_DECL_P (t)
3443      /* We check for constants explicitly since they are not considered
3444	 gimple invariants if they overflowed.  */
3445      || CONSTANT_CLASS_P (t)
3446      || is_gimple_min_invariant (t)
3447      || TREE_CODE (t) == SSA_NAME
3448      || t == error_mark_node)
3449    return true;
3450
3451  if (TREE_CODE (t) == CASE_LABEL_EXPR)
3452    return true;
3453
3454  while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3455	  /* We check for constants explicitly since they are not considered
3456	     gimple invariants if they overflowed.  */
3457	  && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3458	      || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3459	 || (TREE_CODE (t) == COMPONENT_REF
3460	     || TREE_CODE (t) == REALPART_EXPR
3461	     || TREE_CODE (t) == IMAGPART_EXPR))
3462    t = TREE_OPERAND (t, 0);
3463
3464  if (DECL_P (t))
3465    return true;
3466
3467  return false;
3468}
3469
3470
3471/* Called via walk_tree.  Verify tree sharing.  */
3472
3473static tree
3474verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3475{
3476  htab_t htab = (htab_t) data;
3477  void **slot;
3478
3479  if (tree_node_can_be_shared (*tp))
3480    {
3481      *walk_subtrees = false;
3482      return NULL;
3483    }
3484
3485  slot = htab_find_slot (htab, *tp, INSERT);
3486  if (*slot)
3487    return *slot;
3488  *slot = *tp;
3489
3490  return NULL;
3491}
3492
3493
3494/* Verify the GIMPLE statement chain.  */
3495
3496void
3497verify_stmts (void)
3498{
3499  basic_block bb;
3500  block_stmt_iterator bsi;
3501  bool err = false;
3502  htab_t htab;
3503  tree addr;
3504
3505  timevar_push (TV_TREE_STMT_VERIFY);
3506  htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3507
3508  FOR_EACH_BB (bb)
3509    {
3510      tree phi;
3511      int i;
3512
3513      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3514	{
3515	  int phi_num_args = PHI_NUM_ARGS (phi);
3516
3517	  if (bb_for_stmt (phi) != bb)
3518	    {
3519	      error ("bb_for_stmt (phi) is set to a wrong basic block");
3520	      err |= true;
3521	    }
3522
3523	  for (i = 0; i < phi_num_args; i++)
3524	    {
3525	      tree t = PHI_ARG_DEF (phi, i);
3526	      tree addr;
3527
3528	      /* Addressable variables do have SSA_NAMEs but they
3529		 are not considered gimple values.  */
3530	      if (TREE_CODE (t) != SSA_NAME
3531		  && TREE_CODE (t) != FUNCTION_DECL
3532		  && !is_gimple_val (t))
3533		{
3534		  error ("PHI def is not a GIMPLE value");
3535		  debug_generic_stmt (phi);
3536		  debug_generic_stmt (t);
3537		  err |= true;
3538		}
3539
3540	      addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3541	      if (addr)
3542		{
3543		  debug_generic_stmt (addr);
3544		  err |= true;
3545		}
3546
3547	      addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3548	      if (addr)
3549		{
3550		  error ("incorrect sharing of tree nodes");
3551		  debug_generic_stmt (phi);
3552		  debug_generic_stmt (addr);
3553		  err |= true;
3554		}
3555	    }
3556	}
3557
3558      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3559	{
3560	  tree stmt = bsi_stmt (bsi);
3561
3562	  if (bb_for_stmt (stmt) != bb)
3563	    {
3564	      error ("bb_for_stmt (stmt) is set to a wrong basic block");
3565	      err |= true;
3566	    }
3567
3568	  bsi_next (&bsi);
3569	  err |= verify_stmt (stmt, bsi_end_p (bsi));
3570	  addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3571	  if (addr)
3572	    {
3573	      error ("incorrect sharing of tree nodes");
3574	      debug_generic_stmt (stmt);
3575	      debug_generic_stmt (addr);
3576	      err |= true;
3577	    }
3578	}
3579    }
3580
3581  if (err)
3582    internal_error ("verify_stmts failed");
3583
3584  htab_delete (htab);
3585  timevar_pop (TV_TREE_STMT_VERIFY);
3586}
3587
3588
3589/* Verifies that the flow information is OK.  */
3590
3591static int
3592tree_verify_flow_info (void)
3593{
3594  int err = 0;
3595  basic_block bb;
3596  block_stmt_iterator bsi;
3597  tree stmt;
3598  edge e;
3599  edge_iterator ei;
3600
3601  if (ENTRY_BLOCK_PTR->stmt_list)
3602    {
3603      error ("ENTRY_BLOCK has a statement list associated with it");
3604      err = 1;
3605    }
3606
3607  if (EXIT_BLOCK_PTR->stmt_list)
3608    {
3609      error ("EXIT_BLOCK has a statement list associated with it");
3610      err = 1;
3611    }
3612
3613  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3614    if (e->flags & EDGE_FALLTHRU)
3615      {
3616	error ("fallthru to exit from bb %d", e->src->index);
3617	err = 1;
3618      }
3619
3620  FOR_EACH_BB (bb)
3621    {
3622      bool found_ctrl_stmt = false;
3623
3624      stmt = NULL_TREE;
3625
3626      /* Skip labels on the start of basic block.  */
3627      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3628	{
3629	  tree prev_stmt = stmt;
3630
3631	  stmt = bsi_stmt (bsi);
3632
3633	  if (TREE_CODE (stmt) != LABEL_EXPR)
3634	    break;
3635
3636	  if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3637	    {
3638	      error ("nonlocal label %s is not first "
3639		     "in a sequence of labels in bb %d",
3640		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3641		     bb->index);
3642	      err = 1;
3643	    }
3644
3645	  if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3646	    {
3647	      error ("label %s to block does not match in bb %d",
3648		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3649		     bb->index);
3650	      err = 1;
3651	    }
3652
3653	  if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3654	      != current_function_decl)
3655	    {
3656	      error ("label %s has incorrect context in bb %d",
3657		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3658		     bb->index);
3659	      err = 1;
3660	    }
3661	}
3662
3663      /* Verify that the body of basic block BB is free of control flow.  */
3664      for (; !bsi_end_p (bsi); bsi_next (&bsi))
3665	{
3666	  tree stmt = bsi_stmt (bsi);
3667
3668	  if (found_ctrl_stmt)
3669	    {
3670	      error ("control flow in the middle of basic block %d",
3671		     bb->index);
3672	      err = 1;
3673	    }
3674
3675	  if (stmt_ends_bb_p (stmt))
3676	    found_ctrl_stmt = true;
3677
3678	  if (TREE_CODE (stmt) == LABEL_EXPR)
3679	    {
3680	      error ("label %s in the middle of basic block %d",
3681		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3682		     bb->index);
3683	      err = 1;
3684	    }
3685	}
3686      bsi = bsi_last (bb);
3687      if (bsi_end_p (bsi))
3688	continue;
3689
3690      stmt = bsi_stmt (bsi);
3691
3692      err |= verify_eh_edges (stmt);
3693
3694      if (is_ctrl_stmt (stmt))
3695	{
3696	  FOR_EACH_EDGE (e, ei, bb->succs)
3697	    if (e->flags & EDGE_FALLTHRU)
3698	      {
3699		error ("fallthru edge after a control statement in bb %d",
3700		       bb->index);
3701		err = 1;
3702	      }
3703	}
3704
3705      if (TREE_CODE (stmt) != COND_EXPR)
3706	{
3707	  /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
3708	     after anything else but a COND_EXPR.  */
3709	  FOR_EACH_EDGE (e, ei, bb->succs)
3710	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3711	      {
3712		error ("true/false edge after a non-COND_EXPR in bb %d",
3713		       bb->index);
3714		err = 1;
3715	      }
3716	}
3717
3718      switch (TREE_CODE (stmt))
3719	{
3720	case COND_EXPR:
3721	  {
3722	    edge true_edge;
3723	    edge false_edge;
3724	    if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3725		|| TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3726	      {
3727		error ("structured COND_EXPR at the end of bb %d", bb->index);
3728		err = 1;
3729	      }
3730
3731	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3732
3733	    if (!true_edge || !false_edge
3734		|| !(true_edge->flags & EDGE_TRUE_VALUE)
3735		|| !(false_edge->flags & EDGE_FALSE_VALUE)
3736		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3737		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3738		|| EDGE_COUNT (bb->succs) >= 3)
3739	      {
3740		error ("wrong outgoing edge flags at end of bb %d",
3741		       bb->index);
3742		err = 1;
3743	      }
3744
3745	    if (!has_label_p (true_edge->dest,
3746			      GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3747	      {
3748		error ("%<then%> label does not match edge at end of bb %d",
3749		       bb->index);
3750		err = 1;
3751	      }
3752
3753	    if (!has_label_p (false_edge->dest,
3754			      GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3755	      {
3756		error ("%<else%> label does not match edge at end of bb %d",
3757		       bb->index);
3758		err = 1;
3759	      }
3760	  }
3761	  break;
3762
3763	case GOTO_EXPR:
3764	  if (simple_goto_p (stmt))
3765	    {
3766	      error ("explicit goto at end of bb %d", bb->index);
3767    	      err = 1;
3768	    }
3769	  else
3770	    {
3771	      /* FIXME.  We should double check that the labels in the
3772		 destination blocks have their address taken.  */
3773	      FOR_EACH_EDGE (e, ei, bb->succs)
3774		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3775				 | EDGE_FALSE_VALUE))
3776		    || !(e->flags & EDGE_ABNORMAL))
3777		  {
3778		    error ("wrong outgoing edge flags at end of bb %d",
3779			   bb->index);
3780		    err = 1;
3781		  }
3782	    }
3783	  break;
3784
3785	case RETURN_EXPR:
3786	  if (!single_succ_p (bb)
3787	      || (single_succ_edge (bb)->flags
3788		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
3789		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3790	    {
3791	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
3792	      err = 1;
3793	    }
3794	  if (single_succ (bb) != EXIT_BLOCK_PTR)
3795	    {
3796	      error ("return edge does not point to exit in bb %d",
3797		     bb->index);
3798	      err = 1;
3799	    }
3800	  break;
3801
3802	case SWITCH_EXPR:
3803	  {
3804	    tree prev;
3805	    edge e;
3806	    size_t i, n;
3807	    tree vec;
3808
3809	    vec = SWITCH_LABELS (stmt);
3810	    n = TREE_VEC_LENGTH (vec);
3811
3812	    /* Mark all the destination basic blocks.  */
3813	    for (i = 0; i < n; ++i)
3814	      {
3815		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3816		basic_block label_bb = label_to_block (lab);
3817
3818		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3819		label_bb->aux = (void *)1;
3820	      }
3821
3822	    /* Verify that the case labels are sorted.  */
3823	    prev = TREE_VEC_ELT (vec, 0);
3824	    for (i = 1; i < n - 1; ++i)
3825	      {
3826		tree c = TREE_VEC_ELT (vec, i);
3827		if (! CASE_LOW (c))
3828		  {
3829		    error ("found default case not at end of case vector");
3830		    err = 1;
3831		    continue;
3832		  }
3833		if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3834		  {
3835		    error ("case labels not sorted:");
3836		    print_generic_expr (stderr, prev, 0);
3837		    fprintf (stderr," is greater than ");
3838		    print_generic_expr (stderr, c, 0);
3839		    fprintf (stderr," but comes before it.\n");
3840		    err = 1;
3841		  }
3842		prev = c;
3843	      }
3844	    if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3845	      {
3846		error ("no default case found at end of case vector");
3847		err = 1;
3848	      }
3849
3850	    FOR_EACH_EDGE (e, ei, bb->succs)
3851	      {
3852		if (!e->dest->aux)
3853		  {
3854		    error ("extra outgoing edge %d->%d",
3855			   bb->index, e->dest->index);
3856		    err = 1;
3857		  }
3858		e->dest->aux = (void *)2;
3859		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3860				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3861		  {
3862		    error ("wrong outgoing edge flags at end of bb %d",
3863			   bb->index);
3864		    err = 1;
3865		  }
3866	      }
3867
3868	    /* Check that we have all of them.  */
3869	    for (i = 0; i < n; ++i)
3870	      {
3871		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3872		basic_block label_bb = label_to_block (lab);
3873
3874		if (label_bb->aux != (void *)2)
3875		  {
3876		    error ("missing edge %i->%i",
3877			   bb->index, label_bb->index);
3878		    err = 1;
3879		  }
3880	      }
3881
3882	    FOR_EACH_EDGE (e, ei, bb->succs)
3883	      e->dest->aux = (void *)0;
3884	  }
3885
3886	default: ;
3887	}
3888    }
3889
3890  if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3891    verify_dominators (CDI_DOMINATORS);
3892
3893  return err;
3894}
3895
3896
3897/* Updates phi nodes after creating a forwarder block joined
3898   by edge FALLTHRU.  */
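/* DUMMY (FALLTHRU->src) is the forwarder block; its PHI nodes keep
   merging the values arriving on the edges that now enter DUMMY.  For
   each such PHI a new PHI node is created in BB (FALLTHRU->dest) that
   merges the old PHI's renamed result, received over FALLTHRU, with the
   values from the edges that still enter BB directly, which are flushed
   from those edges' pending statement lists below.  */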
3899
3900static void
3901tree_make_forwarder_block (edge fallthru)
3902{
3903  edge e;
3904  edge_iterator ei;
3905  basic_block dummy, bb;
3906  tree phi, new_phi, var;
3907
3908  dummy = fallthru->src;
3909  bb = fallthru->dest;
3910
3911  if (single_pred_p (bb))
3912    return;
3913
3914  /* If we redirected a branch we must create new phi nodes at the
3915     start of BB.  */
3916  for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3917    {
3918      var = PHI_RESULT (phi);
3919      new_phi = create_phi_node (var, bb);
3920      SSA_NAME_DEF_STMT (var) = new_phi;
3921      SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3922      add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3923    }
3924
3925  /* Ensure that the PHI node chain is in the same order.  */
3926  set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3927
3928  /* Add the arguments we have stored on edges.  */
3929  FOR_EACH_EDGE (e, ei, bb->preds)
3930    {
3931      if (e == fallthru)
3932	continue;
3933
3934      flush_pending_stmts (e);
3935    }
3936}
3937
3938
/* Return a non-special label in the head of basic block BB.
   Create one if it doesn't exist.  */
3941
3942tree
3943tree_block_label (basic_block bb)
3944{
3945  block_stmt_iterator i, s = bsi_start (bb);
3946  bool first = true;
3947  tree label, stmt;
3948
3949  for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3950    {
3951      stmt = bsi_stmt (i);
3952      if (TREE_CODE (stmt) != LABEL_EXPR)
3953	break;
3954      label = LABEL_EXPR_LABEL (stmt);
3955      if (!DECL_NONLOCAL (label))
3956	{
3957	  if (!first)
3958	    bsi_move_before (&i, &s);
3959	  return label;
3960	}
3961    }
3962
3963  label = create_artificial_label ();
3964  stmt = build1 (LABEL_EXPR, void_type_node, label);
3965  bsi_insert_before (&s, stmt, BSI_NEW_STMT);
3966  return label;
3967}
3968
3969
3970/* Attempt to perform edge redirection by replacing a possibly complex
3971   jump instruction by a goto or by removing the jump completely.
3972   This can apply only if all edges now point to the same block.  The
3973   parameters and return values are equivalent to
3974   redirect_edge_and_branch.  */
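/* For example, if both arms of the COND_EXPR ending SRC already branch
   to TARGET, the COND_EXPR is removed and the surviving edge is turned
   into a plain fallthru edge to TARGET.  */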
3975
3976static edge
3977tree_try_redirect_by_replacing_jump (edge e, basic_block target)
3978{
3979  basic_block src = e->src;
3980  block_stmt_iterator b;
3981  tree stmt;
3982
3983  /* We can replace or remove a complex jump only when we have exactly
3984     two edges.  */
3985  if (EDGE_COUNT (src->succs) != 2
3986      /* Verify that all targets will be TARGET.  Specifically, the
3987	 edge that is not E must also go to TARGET.  */
3988      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
3989    return NULL;
3990
3991  b = bsi_last (src);
3992  if (bsi_end_p (b))
3993    return NULL;
3994  stmt = bsi_stmt (b);
3995
3996  if (TREE_CODE (stmt) == COND_EXPR
3997      || TREE_CODE (stmt) == SWITCH_EXPR)
3998    {
3999      bsi_remove (&b);
4000      e = ssa_redirect_edge (e, target);
4001      e->flags = EDGE_FALLTHRU;
4002      return e;
4003    }
4004
4005  return NULL;
4006}
4007
4008
4009/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
4010   edge representing the redirected branch.  */
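/* Depending on the statement that ends E->src, redirecting may mean
   rewriting the GOTO_DESTINATION of the selected COND_EXPR arm,
   retargeting the CASE_LABEL_EXPRs of a SWITCH_EXPR, removing a
   RETURN_EXPR, or simply moving a fallthru edge.  */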
4011
4012static edge
4013tree_redirect_edge_and_branch (edge e, basic_block dest)
4014{
4015  basic_block bb = e->src;
4016  block_stmt_iterator bsi;
4017  edge ret;
4018  tree label, stmt;
4019
4020  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4021    return NULL;
4022
4023  if (e->src != ENTRY_BLOCK_PTR
4024      && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4025    return ret;
4026
4027  if (e->dest == dest)
4028    return NULL;
4029
4030  label = tree_block_label (dest);
4031
4032  bsi = bsi_last (bb);
4033  stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4034
4035  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4036    {
4037    case COND_EXPR:
4038      stmt = (e->flags & EDGE_TRUE_VALUE
4039	      ? COND_EXPR_THEN (stmt)
4040	      : COND_EXPR_ELSE (stmt));
4041      GOTO_DESTINATION (stmt) = label;
4042      break;
4043
4044    case GOTO_EXPR:
4045      /* No non-abnormal edges should lead from a non-simple goto, and
4046	 simple ones should be represented implicitly.  */
4047      gcc_unreachable ();
4048
4049    case SWITCH_EXPR:
4050      {
4051        tree cases = get_cases_for_edge (e, stmt);
4052
4053	/* If we have a list of cases associated with E, then use it
4054	   as it's a lot faster than walking the entire case vector.  */
4055	if (cases)
4056	  {
4057	    edge e2 = find_edge (e->src, dest);
4058	    tree last, first;
4059
4060	    first = cases;
4061	    while (cases)
4062	      {
4063		last = cases;
4064		CASE_LABEL (cases) = label;
4065		cases = TREE_CHAIN (cases);
4066	      }
4067
4068	    /* If there was already an edge in the CFG, then we need
4069	       to move all the cases associated with E to E2.  */
4070	    if (e2)
4071	      {
4072		tree cases2 = get_cases_for_edge (e2, stmt);
4073
4074		TREE_CHAIN (last) = TREE_CHAIN (cases2);
4075		TREE_CHAIN (cases2) = first;
4076	      }
4077	  }
4078	else
4079	  {
4080	    tree vec = SWITCH_LABELS (stmt);
4081	    size_t i, n = TREE_VEC_LENGTH (vec);
4082
4083	    for (i = 0; i < n; i++)
4084	      {
4085		tree elt = TREE_VEC_ELT (vec, i);
4086
4087		if (label_to_block (CASE_LABEL (elt)) == e->dest)
4088		  CASE_LABEL (elt) = label;
4089	      }
4090	  }
4091
4092	break;
4093      }
4094
4095    case RETURN_EXPR:
4096      bsi_remove (&bsi);
4097      e->flags |= EDGE_FALLTHRU;
4098      break;
4099
4100    default:
4101      /* Otherwise it must be a fallthru edge, and we don't need to
4102	 do anything besides redirecting it.  */
4103      gcc_assert (e->flags & EDGE_FALLTHRU);
4104      break;
4105    }
4106
4107  /* Update/insert PHI nodes as necessary.  */
4108
4109  /* Now update the edges in the CFG.  */
4110  e = ssa_redirect_edge (e, dest);
4111
4112  return e;
4113}
4114
4115
4116/* Simple wrapper, as we can always redirect fallthru edges.  */
4117
4118static basic_block
4119tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4120{
4121  e = tree_redirect_edge_and_branch (e, dest);
4122  gcc_assert (e);
4123
4124  return NULL;
4125}
4126
4127
4128/* Splits basic block BB after statement STMT (but at least after the
4129   labels).  If STMT is NULL, BB is split just after the labels.  */
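/* For example, splitting a block containing "a = 1; b = 2; c = 3;"
   after the statement "b = 2;" keeps "a = 1; b = 2;" in BB and moves
   "c = 3;", together with all of BB's outgoing edges, to the returned
   block.  */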
4130
4131static basic_block
4132tree_split_block (basic_block bb, void *stmt)
4133{
4134  block_stmt_iterator bsi, bsi_tgt;
4135  tree act;
4136  basic_block new_bb;
4137  edge e;
4138  edge_iterator ei;
4139
4140  new_bb = create_empty_bb (bb);
4141
4142  /* Redirect the outgoing edges.  */
4143  new_bb->succs = bb->succs;
4144  bb->succs = NULL;
4145  FOR_EACH_EDGE (e, ei, new_bb->succs)
4146    e->src = new_bb;
4147
4148  if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4149    stmt = NULL;
4150
4151  /* Move everything from BSI to the new basic block.  */
4152  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4153    {
4154      act = bsi_stmt (bsi);
4155      if (TREE_CODE (act) == LABEL_EXPR)
4156	continue;
4157
4158      if (!stmt)
4159	break;
4160
4161      if (stmt == act)
4162	{
4163	  bsi_next (&bsi);
4164	  break;
4165	}
4166    }
4167
4168  bsi_tgt = bsi_start (new_bb);
4169  while (!bsi_end_p (bsi))
4170    {
4171      act = bsi_stmt (bsi);
4172      bsi_remove (&bsi);
4173      bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4174    }
4175
4176  return new_bb;
4177}
4178
4179
4180/* Moves basic block BB after block AFTER.  */
4181
4182static bool
4183tree_move_block_after (basic_block bb, basic_block after)
4184{
4185  if (bb->prev_bb == after)
4186    return true;
4187
4188  unlink_block (bb);
4189  link_block (bb, after);
4190
4191  return true;
4192}
4193
4194
4195/* Return true if basic_block can be duplicated.  */
4196
4197static bool
4198tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4199{
4200  return true;
4201}
4202
4203
4204/* Create a duplicate of the basic block BB.  NOTE: This does not
4205   preserve SSA form.  */
4206
4207static basic_block
4208tree_duplicate_bb (basic_block bb)
4209{
4210  basic_block new_bb;
4211  block_stmt_iterator bsi, bsi_tgt;
4212  tree phi;
4213
4214  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4215
4216  /* Copy the PHI nodes.  We ignore PHI node arguments here because
4217     the incoming edges have not been setup yet.  */
4218  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4219    {
4220      tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4221      create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4222    }
4223
4224  /* Keep the chain of PHI nodes in the same order so that they can be
4225     updated by ssa_redirect_edge.  */
4226  set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4227
4228  bsi_tgt = bsi_start (new_bb);
4229  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4230    {
4231      def_operand_p def_p;
4232      ssa_op_iter op_iter;
4233      tree stmt, copy;
4234      int region;
4235
4236      stmt = bsi_stmt (bsi);
4237      if (TREE_CODE (stmt) == LABEL_EXPR)
4238	continue;
4239
4240      /* Create a new copy of STMT and duplicate STMT's virtual
4241	 operands.  */
4242      copy = unshare_expr (stmt);
4243      bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4244      copy_virtual_operands (copy, stmt);
4245      region = lookup_stmt_eh_region (stmt);
4246      if (region >= 0)
4247	add_stmt_to_eh_region (copy, region);
4248
4249      /* Create new names for all the definitions created by COPY and
4250	 add replacement mappings for each new name.  */
4251      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4252	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4253    }
4254
4255  return new_bb;
4256}
4257
4258
4259/* Basic block BB_COPY was created by code duplication.  Add phi node
4260   arguments for edges going out of BB_COPY.  The blocks that were
4261   duplicated have BB_DUPLICATED set.  */
4262
4263void
4264add_phi_args_after_copy_bb (basic_block bb_copy)
4265{
4266  basic_block bb, dest;
4267  edge e, e_copy;
4268  edge_iterator ei;
4269  tree phi, phi_copy, phi_next, def;
4270
4271  bb = get_bb_original (bb_copy);
4272
4273  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4274    {
4275      if (!phi_nodes (e_copy->dest))
4276	continue;
4277
4278      if (e_copy->dest->flags & BB_DUPLICATED)
4279	dest = get_bb_original (e_copy->dest);
4280      else
4281	dest = e_copy->dest;
4282
4283      e = find_edge (bb, dest);
4284      if (!e)
4285	{
	  /* During loop unrolling the target of the latch edge is copied.
	     In this case we are not looking for an edge to DEST, but for
	     an edge to the duplicated block whose original was DEST.  */
4289	  FOR_EACH_EDGE (e, ei, bb->succs)
4290	    if ((e->dest->flags & BB_DUPLICATED)
4291		&& get_bb_original (e->dest) == dest)
4292	      break;
4293
4294	  gcc_assert (e != NULL);
4295	}
4296
4297      for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4298	   phi;
4299	   phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4300	{
4301	  phi_next = PHI_CHAIN (phi);
4302	  def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4303	  add_phi_arg (phi_copy, def, e_copy);
4304	}
4305    }
4306}
4307
4308/* Blocks in REGION_COPY array of length N_REGION were created by
4309   duplication of basic blocks.  Add phi node arguments for edges
4310   going from these blocks.  */
4311
4312void
4313add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4314{
4315  unsigned i;
4316
4317  for (i = 0; i < n_region; i++)
4318    region_copy[i]->flags |= BB_DUPLICATED;
4319
4320  for (i = 0; i < n_region; i++)
4321    add_phi_args_after_copy_bb (region_copy[i]);
4322
4323  for (i = 0; i < n_region; i++)
4324    region_copy[i]->flags &= ~BB_DUPLICATED;
4325}
4326
4327/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4328   important exit edge EXIT.  By important we mean that no SSA name defined
   inside the region is live over the other exit edges of the region.  All entry
4330   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
4331   to the duplicate of the region.  SSA form is not updated, but dominance
4332   and loop information is.  The new basic blocks are stored to REGION_COPY
4333   in the same order as they had in REGION, provided that REGION_COPY is not
4334   NULL.  The function returns false if it is unable to copy the region,
4335   true otherwise.  */
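/* In the primary use, loop header copying, REGION consists of the loop
   header blocks, ENTRY is the preheader edge and EXIT is the edge from
   the header into the loop body.  After the call the preheader edge
   enters the copied header blocks, which now sit in front of the loop,
   while the original header blocks remain in the loop and are reached
   only through the latch edge.  */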
4336
4337bool
4338tree_duplicate_sese_region (edge entry, edge exit,
4339			    basic_block *region, unsigned n_region,
4340			    basic_block *region_copy)
4341{
4342  unsigned i, n_doms;
4343  bool free_region_copy = false, copying_header = false;
4344  struct loop *loop = entry->dest->loop_father;
4345  edge exit_copy;
4346  basic_block *doms;
4347  edge redirected;
4348  int total_freq = 0, entry_freq = 0;
4349  gcov_type total_count = 0, entry_count = 0;
4350
4351  if (!can_copy_bbs_p (region, n_region))
4352    return false;
4353
  /* Some sanity checking.  Note that we do not check for all possible
     misuses of this function.  I.e. if you ask to copy something weird,
     it will work, but the state of the structures probably will not be
     correct.  */
4358  for (i = 0; i < n_region; i++)
4359    {
4360      /* We do not handle subloops, i.e. all the blocks must belong to the
4361	 same loop.  */
4362      if (region[i]->loop_father != loop)
4363	return false;
4364
4365      if (region[i] != entry->dest
4366	  && region[i] == loop->header)
4367	return false;
4368    }
4369
4370  loop->copy = loop;
4371
  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be the new latch and entry edges.  */
4374  if (loop->header == entry->dest)
4375    {
4376      copying_header = true;
4377      loop->copy = loop->outer;
4378
4379      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4380	return false;
4381
4382      for (i = 0; i < n_region; i++)
4383	if (region[i] != exit->src
4384	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4385	  return false;
4386    }
4387
4388  if (!region_copy)
4389    {
4390      region_copy = xmalloc (sizeof (basic_block) * n_region);
4391      free_region_copy = true;
4392    }
4393
4394  /* gcc_assert (!need_ssa_update_p ()); */
4395
4396  /* Record blocks outside the region that are dominated by something
4397     inside.  */
4398  doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4399  initialize_original_copy_tables ();
4400
4401  n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4402
4403  if (entry->dest->count)
4404    {
4405      total_count = entry->dest->count;
4406      entry_count = entry->count;
4407      /* Fix up corner cases, to avoid division by zero or creation of negative
4408	 frequencies.  */
4409      if (entry_count > total_count)
4410	entry_count = total_count;
4411    }
4412  else
4413    {
4414      total_freq = entry->dest->frequency;
4415      entry_freq = EDGE_FREQUENCY (entry);
4416      /* Fix up corner cases, to avoid division by zero or creation of negative
4417	 frequencies.  */
4418      if (total_freq == 0)
4419	total_freq = 1;
4420      else if (entry_freq > total_freq)
4421	entry_freq = total_freq;
4422    }
4423
4424  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4425	    split_edge_bb_loc (entry));
4426  if (total_count)
4427    {
4428      scale_bbs_frequencies_gcov_type (region, n_region,
4429				       total_count - entry_count,
4430				       total_count);
4431      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4432	  			       total_count);
4433    }
4434  else
4435    {
4436      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4437				 total_freq);
4438      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4439    }
4440
4441  if (copying_header)
4442    {
4443      loop->header = exit->dest;
4444      loop->latch = exit->src;
4445    }
4446
4447  /* Redirect the entry and add the phi node arguments.  */
4448  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4449  gcc_assert (redirected != NULL);
4450  flush_pending_stmts (entry);
4451
  /* Concerning updating of dominators:  We must recount dominators
     for the entry block and its copy.  Anything that is outside of
     the region, but was dominated by something inside, needs
     recounting as well.  */
4456  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4457  doms[n_doms++] = get_bb_original (entry->dest);
4458  iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4459  free (doms);
4460
4461  /* Add the other PHI node arguments.  */
4462  add_phi_args_after_copy (region_copy, n_region);
4463
4464  if (free_region_copy)
4465    free (region_copy);
4466
4467  free_original_copy_tables ();
4468  return true;
4469}
4470
4471
4472/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h)  */
4473
4474void
4475dump_function_to_file (tree fn, FILE *file, int flags)
4476{
4477  tree arg, vars, var;
4478  bool ignore_topmost_bind = false, any_var = false;
4479  basic_block bb;
4480  tree chain;
4481
4482  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4483
4484  arg = DECL_ARGUMENTS (fn);
4485  while (arg)
4486    {
4487      print_generic_expr (file, arg, dump_flags);
4488      if (TREE_CHAIN (arg))
4489	fprintf (file, ", ");
4490      arg = TREE_CHAIN (arg);
4491    }
4492  fprintf (file, ")\n");
4493
4494  if (flags & TDF_DETAILS)
4495    dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4496  if (flags & TDF_RAW)
4497    {
4498      dump_node (fn, TDF_SLIM | flags, file);
4499      return;
4500    }
4501
4502  /* When GIMPLE is lowered, the variables are no longer available in
4503     BIND_EXPRs, so display them separately.  */
4504  if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4505    {
4506      ignore_topmost_bind = true;
4507
4508      fprintf (file, "{\n");
4509      for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4510	{
4511	  var = TREE_VALUE (vars);
4512
4513	  print_generic_decl (file, var, flags);
4514	  fprintf (file, "\n");
4515
4516	  any_var = true;
4517	}
4518    }
4519
4520  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4521    {
4522      /* Make a CFG based dump.  */
4523      check_bb_profile (ENTRY_BLOCK_PTR, file);
4524      if (!ignore_topmost_bind)
4525	fprintf (file, "{\n");
4526
4527      if (any_var && n_basic_blocks)
4528	fprintf (file, "\n");
4529
4530      FOR_EACH_BB (bb)
4531	dump_generic_bb (file, bb, 2, flags);
4532
4533      fprintf (file, "}\n");
4534      check_bb_profile (EXIT_BLOCK_PTR, file);
4535    }
4536  else
4537    {
4538      int indent;
4539
4540      /* Make a tree based dump.  */
4541      chain = DECL_SAVED_TREE (fn);
4542
4543      if (TREE_CODE (chain) == BIND_EXPR)
4544	{
4545	  if (ignore_topmost_bind)
4546	    {
4547	      chain = BIND_EXPR_BODY (chain);
4548	      indent = 2;
4549	    }
4550	  else
4551	    indent = 0;
4552	}
4553      else
4554	{
4555	  if (!ignore_topmost_bind)
4556	    fprintf (file, "{\n");
4557	  indent = 2;
4558	}
4559
4560      if (any_var)
4561	fprintf (file, "\n");
4562
4563      print_generic_stmt_indented (file, chain, flags, indent);
4564      if (ignore_topmost_bind)
4565	fprintf (file, "}\n");
4566    }
4567
4568  fprintf (file, "\n\n");
4569}
4570
4571
/* Pretty print of the loops' intermediate representation.  */
4573static void print_loop (FILE *, struct loop *, int);
4574static void print_pred_bbs (FILE *, basic_block bb);
4575static void print_succ_bbs (FILE *, basic_block bb);
4576
4577
4578/* Print on FILE the indexes for the predecessors of basic_block BB.  */
4579
4580static void
4581print_pred_bbs (FILE *file, basic_block bb)
4582{
4583  edge e;
4584  edge_iterator ei;
4585
4586  FOR_EACH_EDGE (e, ei, bb->preds)
4587    fprintf (file, "bb_%d ", e->src->index);
4588}
4589
4590
4591/* Print on FILE the indexes for the successors of basic_block BB.  */
4592
4593static void
4594print_succ_bbs (FILE *file, basic_block bb)
4595{
4596  edge e;
4597  edge_iterator ei;
4598
4599  FOR_EACH_EDGE (e, ei, bb->succs)
4600    fprintf (file, "bb_%d ", e->dest->index);
4601}
4602
4603
4604/* Pretty print LOOP on FILE, indented INDENT spaces.  */
4605
4606static void
4607print_loop (FILE *file, struct loop *loop, int indent)
4608{
4609  char *s_indent;
4610  basic_block bb;
4611
4612  if (loop == NULL)
4613    return;
4614
4615  s_indent = (char *) alloca ((size_t) indent + 1);
4616  memset ((void *) s_indent, ' ', (size_t) indent);
4617  s_indent[indent] = '\0';
4618
4619  /* Print the loop's header.  */
4620  fprintf (file, "%sloop_%d\n", s_indent, loop->num);
4621
4622  /* Print the loop's body.  */
4623  fprintf (file, "%s{\n", s_indent);
4624  FOR_EACH_BB (bb)
4625    if (bb->loop_father == loop)
4626      {
4627	/* Print the basic_block's header.  */
4628	fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
4629	print_pred_bbs (file, bb);
4630	fprintf (file, "}, succs = {");
4631	print_succ_bbs (file, bb);
4632	fprintf (file, "})\n");
4633
4634	/* Print the basic_block's body.  */
4635	fprintf (file, "%s  {\n", s_indent);
4636	tree_dump_bb (bb, file, indent + 4);
4637	fprintf (file, "%s  }\n", s_indent);
4638      }
4639
4640  print_loop (file, loop->inner, indent + 2);
4641  fprintf (file, "%s}\n", s_indent);
4642  print_loop (file, loop->next, indent);
4643}
4644
4645
/* Follow a CFG edge from the entry point of the program, and on entry
   to a loop, pretty print the loop structure on FILE.  */
4648
4649void
4650print_loop_ir (FILE *file)
4651{
4652  basic_block bb;
4653
4654  bb = BASIC_BLOCK (0);
4655  if (bb && bb->loop_father)
4656    print_loop (file, bb->loop_father, 0);
4657}
4658
4659
4660/* Debugging loops structure at tree level.  */
4661
4662void
4663debug_loop_ir (void)
4664{
4665  print_loop_ir (stderr);
4666}
4667
4668
4669/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false
   otherwise.  */
4672
4673static bool
4674tree_block_ends_with_call_p (basic_block bb)
4675{
4676  block_stmt_iterator bsi = bsi_last (bb);
4677  return get_call_expr_in (bsi_stmt (bsi)) != NULL;
4678}
4679
4680
/* Return true if BB ends with a conditional branch.  Return false
   otherwise.  */
4683
4684static bool
4685tree_block_ends_with_condjump_p (basic_block bb)
4686{
4687  tree stmt = last_stmt (bb);
4688  return (stmt && TREE_CODE (stmt) == COND_EXPR);
4689}
4690
4691
4692/* Return true if we need to add fake edge to exit at statement T.
4693   Helper function for tree_flow_call_edges_add.  */
4694
4695static bool
4696need_fake_edge_p (tree t)
4697{
4698  tree call;
4699
4700  /* NORETURN and LONGJMP calls already have an edge to exit.
4701     CONST and PURE calls do not need one.
4702     We don't currently check for CONST and PURE here, although
4703     it would be a good idea, because those attributes are
4704     figured out from the RTL in mark_constant_function, and
4705     the counter incrementation code from -fprofile-arcs
4706     leads to different results from -fbranch-probabilities.  */
4707  call = get_call_expr_in (t);
4708  if (call
4709      && !(call_expr_flags (call) & ECF_NORETURN))
4710    return true;
4711
4712  if (TREE_CODE (t) == ASM_EXPR
4713       && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
4714    return true;
4715
4716  return false;
4717}
4718
4719
/* Add fake edges to the function exit for any non-constant, non-noreturn
   calls and volatile inline assembly in the bitmap of blocks specified by
   BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.
4724
4725   The goal is to expose cases in which entering a basic block does
4726   not imply that all subsequent instructions must be executed.  */
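/* For example, for a block containing

	x = foo ();
	y = x + 1;

   the call to foo might reach exit or longjmp and never return, so the
   block is split after the call and a fake edge from the call's block
   to EXIT_BLOCK_PTR is added; the block profiler relies on these edges
   when solving the minimal spanning tree of the CFG.  */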
4727
4728static int
4729tree_flow_call_edges_add (sbitmap blocks)
4730{
4731  int i;
4732  int blocks_split = 0;
4733  int last_bb = last_basic_block;
4734  bool check_last_block = false;
4735
4736  if (n_basic_blocks == 0)
4737    return 0;
4738
4739  if (! blocks)
4740    check_last_block = true;
4741  else
4742    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4743
4744  /* In the last basic block, before epilogue generation, there will be
4745     a fallthru edge to EXIT.  Special care is required if the last insn
4746     of the last basic block is a call because make_edge folds duplicate
4747     edges, which would result in the fallthru edge also being marked
4748     fake, which would result in the fallthru edge being removed by
4749     remove_fake_edges, which would result in an invalid CFG.
4750
4751     Moreover, we can't elide the outgoing fake edge, since the block
4752     profiler needs to take this into account in order to solve the minimal
4753     spanning tree in the case that the call doesn't return.
4754
4755     Handle this by adding a dummy instruction in a new last basic block.  */
4756  if (check_last_block)
4757    {
4758      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4759      block_stmt_iterator bsi = bsi_last (bb);
4760      tree t = NULL_TREE;
4761      if (!bsi_end_p (bsi))
4762	t = bsi_stmt (bsi);
4763
4764      if (t && need_fake_edge_p (t))
4765	{
4766	  edge e;
4767
4768	  e = find_edge (bb, EXIT_BLOCK_PTR);
4769	  if (e)
4770	    {
4771	      bsi_insert_on_edge (e, build_empty_stmt ());
4772	      bsi_commit_edge_inserts ();
4773	    }
4774	}
4775    }
4776
  /* Now add fake edges to the function exit for any non-constant
     calls, since there is no way for us to determine whether they
     will return or not...  */
4780  for (i = 0; i < last_bb; i++)
4781    {
4782      basic_block bb = BASIC_BLOCK (i);
4783      block_stmt_iterator bsi;
4784      tree stmt, last_stmt;
4785
4786      if (!bb)
4787	continue;
4788
4789      if (blocks && !TEST_BIT (blocks, i))
4790	continue;
4791
4792      bsi = bsi_last (bb);
4793      if (!bsi_end_p (bsi))
4794	{
4795	  last_stmt = bsi_stmt (bsi);
4796	  do
4797	    {
4798	      stmt = bsi_stmt (bsi);
4799	      if (need_fake_edge_p (stmt))
4800		{
4801		  edge e;
4802		  /* The handling above of the final block before the
4803		     epilogue should be enough to verify that there is
4804		     no edge to the exit block in CFG already.
4805		     Calling make_edge in such case would cause us to
4806		     mark that edge as fake and remove it later.  */
4807#ifdef ENABLE_CHECKING
4808		  if (stmt == last_stmt)
4809		    {
4810		      e = find_edge (bb, EXIT_BLOCK_PTR);
4811		      gcc_assert (e == NULL);
4812		    }
4813#endif
4814
4815		  /* Note that the following may create a new basic block
4816		     and renumber the existing basic blocks.  */
4817		  if (stmt != last_stmt)
4818		    {
4819		      e = split_block (bb, stmt);
4820		      if (e)
4821			blocks_split++;
4822		    }
4823		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4824		}
4825	      bsi_prev (&bsi);
4826	    }
4827	  while (!bsi_end_p (bsi));
4828	}
4829    }
4830
4831  if (blocks_split)
4832    verify_flow_info ();
4833
4834  return blocks_split;
4835}
4836
4837bool
4838tree_purge_dead_eh_edges (basic_block bb)
4839{
4840  bool changed = false;
4841  edge e;
4842  edge_iterator ei;
4843  tree stmt = last_stmt (bb);
4844
4845  if (stmt && tree_can_throw_internal (stmt))
4846    return false;
4847
4848  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4849    {
4850      if (e->flags & EDGE_EH)
4851	{
4852	  remove_edge (e);
4853	  changed = true;
4854	}
4855      else
4856	ei_next (&ei);
4857    }
4858
4859  /* Removal of dead EH edges might change dominators of not
4860     just immediate successors.  E.g. when bb1 is changed so that
4861     it no longer can throw and bb1->bb3 and bb1->bb4 are dead
4862     eh edges purged by this function in:
4863           0
4864	  / \
4865	 v   v
4866	 1-->2
4867        / \  |
4868       v   v |
4869       3-->4 |
4870        \    v
4871	 --->5
4872	     |
4873	     -
4874     idom(bb5) must be recomputed.  For now just free the dominance
4875     info.  */
4876  if (changed)
4877    free_dominance_info (CDI_DOMINATORS);
4878
4879  return changed;
4880}
4881
4882bool
4883tree_purge_all_dead_eh_edges (bitmap blocks)
4884{
4885  bool changed = false;
4886  unsigned i;
4887  bitmap_iterator bi;
4888
4889  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
4890    {
4891      changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
4892    }
4893
4894  return changed;
4895}
4896
4897/* This function is called whenever a new edge is created or
4898   redirected.  */
4899
4900static void
4901tree_execute_on_growing_pred (edge e)
4902{
4903  basic_block bb = e->dest;
4904
4905  if (phi_nodes (bb))
4906    reserve_phi_args_for_new_edge (bb);
4907}
4908
4909/* This function is called immediately before edge E is removed from
4910   the edge vector E->dest->preds.  */
4911
4912static void
4913tree_execute_on_shrinking_pred (edge e)
4914{
4915  if (phi_nodes (e->dest))
4916    remove_phi_args (e);
4917}
4918
4919/*---------------------------------------------------------------------------
4920  Helper functions for Loop versioning
4921  ---------------------------------------------------------------------------*/
4922
/* Adjust PHI nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by splitting 'second's incoming edge, 'second'
   received PHI arguments on that edge from split_edge().  Later, an
   additional edge 'e' was created to connect 'new_head' and 'first'.
   This routine now adds to edge 'e' the PHI arguments that the edge from
   'new_head' to 'second' received as part of that edge splitting.  */
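/* Schematically:

		new_head
		 /     \
	     e  /       \  e2
	       v         v
	     first     second

   The argument that each PHI node in SECOND has for edge E2 is copied
   to the corresponding PHI node in FIRST as its argument for the new
   edge E.  */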
4931
4932static void
4933tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
4934				basic_block new_head, edge e)
4935{
4936  tree phi1, phi2;
4937  edge e2 = find_edge (new_head, second);
4938
4939  /* Because NEW_HEAD has been created by splitting SECOND's incoming
4940     edge, we should always have an edge from NEW_HEAD to SECOND.  */
4941  gcc_assert (e2 != NULL);
4942
4943  /* Browse all 'second' basic block phi nodes and add phi args to
4944     edge 'e' for 'first' head. PHI args are always in correct order.  */
4945
4946  for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
4947       phi2 && phi1;
4948       phi2 = PHI_CHAIN (phi2),  phi1 = PHI_CHAIN (phi1))
4949    {
4950      tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
4951      add_phi_arg (phi1, def, e);
4952    }
4953}
4954
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
   the destination of the ELSE part.  */
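/* COND_BB ends up with

	if (COND_EXPR) goto <first_head>; else goto <second_head>;

   and its existing single successor edge is converted from a fallthru
   edge into the FALSE edge; in the loop versioning use that edge leads
   to SECOND_HEAD, and the matching TRUE edge to FIRST_HEAD is added by
   the caller afterwards.  */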
4958static void
4959tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
4960                            basic_block cond_bb, void *cond_e)
4961{
4962  block_stmt_iterator bsi;
4963  tree goto1 = NULL_TREE;
4964  tree goto2 = NULL_TREE;
4965  tree new_cond_expr = NULL_TREE;
4966  tree cond_expr = (tree) cond_e;
4967  edge e0;
4968
  /* Build the new conditional expression.  */
4970  goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
4971  goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
4972  new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
4973
4974  /* Add new cond in cond_bb.  */
4975  bsi = bsi_start (cond_bb);
4976  bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
4977  /* Adjust edges appropriately to connect new head with first head
4978     as well as second head.  */
4979  e0 = single_succ_edge (cond_bb);
4980  e0->flags &= ~EDGE_FALLTHRU;
4981  e0->flags |= EDGE_FALSE_VALUE;
4982}
4983
4984struct cfg_hooks tree_cfg_hooks = {
4985  "tree",
4986  tree_verify_flow_info,
4987  tree_dump_bb,			/* dump_bb  */
4988  create_bb,			/* create_basic_block  */
4989  tree_redirect_edge_and_branch,/* redirect_edge_and_branch  */
4990  tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force  */
4991  remove_bb,			/* delete_basic_block  */
4992  tree_split_block,		/* split_block  */
4993  tree_move_block_after,	/* move_block_after  */
4994  tree_can_merge_blocks_p,	/* can_merge_blocks_p  */
4995  tree_merge_blocks,		/* merge_blocks  */
4996  tree_predict_edge,		/* predict_edge  */
4997  tree_predicted_by_p,		/* predicted_by_p  */
4998  tree_can_duplicate_bb_p,	/* can_duplicate_block_p  */
4999  tree_duplicate_bb,		/* duplicate_block  */
5000  tree_split_edge,		/* split_edge  */
  tree_make_forwarder_block,	/* make_forwarder_block  */
5002  NULL,				/* tidy_fallthru_edge  */
5003  tree_block_ends_with_call_p,	/* block_ends_with_call_p */
5004  tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5005  tree_flow_call_edges_add,     /* flow_call_edges_add */
5006  tree_execute_on_growing_pred,	/* execute_on_growing_pred */
5007  tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5008  tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5009  tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5010  tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
5011  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5012  flush_pending_stmts 		/* flush_pending_stmts */
5013};
5014
5015
5016/* Split all critical edges.  */
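/* An edge is critical when its source block has more than one successor
   and its destination block has more than one predecessor; no statement
   can be inserted on such an edge without first splitting it, i.e.
   without placing a new, empty basic block in the middle of the edge.  */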
5017
5018static void
5019split_critical_edges (void)
5020{
5021  basic_block bb;
5022  edge e;
5023  edge_iterator ei;
5024
5025  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5026     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
5027     mappings around the calls to split_edge.  */
5028  start_recording_case_labels ();
5029  FOR_ALL_BB (bb)
5030    {
5031      FOR_EACH_EDGE (e, ei, bb->succs)
5032	if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5033	  {
5034	    split_edge (e);
5035	  }
5036    }
5037  end_recording_case_labels ();
5038}
5039
5040struct tree_opt_pass pass_split_crit_edges =
5041{
5042  "crited",                          /* name */
5043  NULL,                          /* gate */
5044  split_critical_edges,          /* execute */
5045  NULL,                          /* sub */
5046  NULL,                          /* next */
5047  0,                             /* static_pass_number */
5048  TV_TREE_SPLIT_EDGES,           /* tv_id */
  PROP_cfg,                      /* properties_required */
5050  PROP_no_crit_edges,            /* properties_provided */
5051  0,                             /* properties_destroyed */
5052  0,                             /* todo_flags_start */
5053  TODO_dump_func,                /* todo_flags_finish */
5054  0                              /* letter */
5055};
5056
5057
/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
   a temporary, making sure it is registered to be renamed if
   necessary, and finally return the temporary.  Put the statements
   that compute EXP before the current statement in BSI.  */
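/* For example, if EXP is "a + b", the statement "tmp = a + b;" is
   inserted before the statement at *BSI and the new temporary "tmp" is
   returned; if EXP is already a GIMPLE value, such as an SSA name or a
   constant, it is returned unchanged.  */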
5062
5063tree
5064gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5065{
5066  tree t, new_stmt, orig_stmt;
5067
5068  if (is_gimple_val (exp))
5069    return exp;
5070
5071  t = make_rename_temp (type, NULL);
5072  new_stmt = build (MODIFY_EXPR, type, t, exp);
5073
5074  orig_stmt = bsi_stmt (*bsi);
5075  SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5076  TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5077
5078  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5079
5080  return t;
5081}
5082
5083/* Build a ternary operation and gimplify it.  Emit code before BSI.
5084   Return the gimple_val holding the result.  */
5085
5086tree
5087gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5088		 tree type, tree a, tree b, tree c)
5089{
5090  tree ret;
5091
5092  ret = fold_build3 (code, type, a, b, c);
5093  STRIP_NOPS (ret);
5094
5095  return gimplify_val (bsi, type, ret);
5096}
5097
5098/* Build a binary operation and gimplify it.  Emit code before BSI.
5099   Return the gimple_val holding the result.  */
5100
5101tree
5102gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5103		 tree type, tree a, tree b)
5104{
5105  tree ret;
5106
5107  ret = fold_build2 (code, type, a, b);
5108  STRIP_NOPS (ret);
5109
5110  return gimplify_val (bsi, type, ret);
5111}
5112
5113/* Build a unary operation and gimplify it.  Emit code before BSI.
5114   Return the gimple_val holding the result.  */
5115
5116tree
5117gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5118		 tree a)
5119{
5120  tree ret;
5121
5122  ret = fold_build1 (code, type, a);
5123  STRIP_NOPS (ret);
5124
5125  return gimplify_val (bsi, type, ret);
5126}
5127
5128
5129
5130/* Emit return warnings.  */
5131
5132static void
5133execute_warn_function_return (void)
5134{
5135#ifdef USE_MAPPED_LOCATION
5136  source_location location;
5137#else
5138  location_t *locus;
5139#endif
5140  tree last;
5141  edge e;
5142  edge_iterator ei;
5143
5144  /* If we have a path to EXIT, then we do return.  */
5145  if (TREE_THIS_VOLATILE (cfun->decl)
5146      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5147    {
5148#ifdef USE_MAPPED_LOCATION
5149      location = UNKNOWN_LOCATION;
5150#else
5151      locus = NULL;
5152#endif
5153      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5154	{
5155	  last = last_stmt (e->src);
5156	  if (TREE_CODE (last) == RETURN_EXPR
5157#ifdef USE_MAPPED_LOCATION
5158	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5159#else
5160	      && (locus = EXPR_LOCUS (last)) != NULL)
5161#endif
5162	    break;
5163	}
5164#ifdef USE_MAPPED_LOCATION
5165      if (location == UNKNOWN_LOCATION)
5166	location = cfun->function_end_locus;
5167      if (warn_missing_noreturn)
5168        warning (0, "%H%<noreturn%> function does return", &location);
5169#else
5170      if (!locus)
5171	locus = &cfun->function_end_locus;
5172      if (warn_missing_noreturn)
5173        warning (0, "%H%<noreturn%> function does return", locus);
5174#endif
5175    }
5176
5177  /* If we see "return;" in some basic block, then we do reach the end
5178     without returning a value.  */
5179  else if (warn_return_type
5180	   && !TREE_NO_WARNING (cfun->decl)
5181	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5182	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5183    {
5184      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5185	{
5186	  tree last = last_stmt (e->src);
5187	  if (TREE_CODE (last) == RETURN_EXPR
5188	      && TREE_OPERAND (last, 0) == NULL
5189	      && !TREE_NO_WARNING (last))
5190	    {
5191#ifdef USE_MAPPED_LOCATION
5192	      location = EXPR_LOCATION (last);
5193	      if (location == UNKNOWN_LOCATION)
5194		  location = cfun->function_end_locus;
5195	      warning (0, "%Hcontrol reaches end of non-void function", &location);
5196#else
5197	      locus = EXPR_LOCUS (last);
5198	      if (!locus)
5199		locus = &cfun->function_end_locus;
5200	      warning (0, "%Hcontrol reaches end of non-void function", locus);
5201#endif
5202	      TREE_NO_WARNING (cfun->decl) = 1;
5203	      break;
5204	    }
5205	}
5206    }
5207}
5208
5209
5210/* Given a basic block B which ends with a conditional and has
5211   precisely two successors, determine which of the edges is taken if
5212   the conditional is true and which is taken if the conditional is
5213   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
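/* A typical use, for a block B that ends in a COND_EXPR:

	edge true_edge, false_edge;
	extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   B is assumed to have exactly two successors, one marked
   EDGE_TRUE_VALUE and the other EDGE_FALSE_VALUE.  */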
5214
5215void
5216extract_true_false_edges_from_block (basic_block b,
5217				     edge *true_edge,
5218				     edge *false_edge)
5219{
5220  edge e = EDGE_SUCC (b, 0);
5221
5222  if (e->flags & EDGE_TRUE_VALUE)
5223    {
5224      *true_edge = e;
5225      *false_edge = EDGE_SUCC (b, 1);
5226    }
5227  else
5228    {
5229      *false_edge = e;
5230      *true_edge = EDGE_SUCC (b, 1);
5231    }
5232}
5233
5234struct tree_opt_pass pass_warn_function_return =
5235{
5236  NULL,					/* name */
5237  NULL,					/* gate */
5238  execute_warn_function_return,		/* execute */
5239  NULL,					/* sub */
5240  NULL,					/* next */
5241  0,					/* static_pass_number */
5242  0,					/* tv_id */
5243  PROP_cfg,				/* properties_required */
5244  0,					/* properties_provided */
5245  0,					/* properties_destroyed */
5246  0,					/* todo_flags_start */
5247  0,					/* todo_flags_finish */
5248  0					/* letter */
5249};
5250
5251/* Emit noreturn warnings.  */
5252
5253static void
5254execute_warn_function_noreturn (void)
5255{
5256  if (warn_missing_noreturn
5257      && !TREE_THIS_VOLATILE (cfun->decl)
5258      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5259      && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5260    warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5261	     "for attribute %<noreturn%>",
5262	     cfun->decl);
5263}
5264
5265struct tree_opt_pass pass_warn_function_noreturn =
5266{
5267  NULL,					/* name */
5268  NULL,					/* gate */
5269  execute_warn_function_noreturn,	/* execute */
5270  NULL,					/* sub */
5271  NULL,					/* next */
5272  0,					/* static_pass_number */
5273  0,					/* tv_id */
5274  PROP_cfg,				/* properties_required */
5275  0,					/* properties_provided */
5276  0,					/* properties_destroyed */
5277  0,					/* todo_flags_start */
5278  0,					/* todo_flags_finish */
5279  0					/* letter */
5280};
5281