1/* Basic block reordering routines for the GNU compiler.
2   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3   Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "rtl.h"
27#include "hard-reg-set.h"
28#include "obstack.h"
29#include "basic-block.h"
30#include "insn-config.h"
31#include "output.h"
32#include "function.h"
33#include "cfglayout.h"
34#include "cfgloop.h"
35#include "target.h"
36#include "ggc.h"
37#include "alloc-pool.h"
38#include "flags.h"
39#include "tree-pass.h"
40#include "df.h"
41#include "vecprim.h"
42
/* Hold the interesting leading (header) and trailing (footer) notes for
   the function while it is in cfglayout mode.  */
rtx cfg_layout_function_footer;
rtx cfg_layout_function_header;

/* Forward declarations for the static helpers below.  */
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (const_rtx);
57
58rtx
59unlink_insn_chain (rtx first, rtx last)
60{
61  rtx prevfirst = PREV_INSN (first);
62  rtx nextlast = NEXT_INSN (last);
63
64  PREV_INSN (first) = NULL;
65  NEXT_INSN (last) = NULL;
66  if (prevfirst)
67    NEXT_INSN (prevfirst) = nextlast;
68  if (nextlast)
69    PREV_INSN (nextlast) = prevfirst;
70  else
71    set_last_insn (prevfirst);
72  if (!prevfirst)
73    set_first_insn (nextlast);
74  return first;
75}
76
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* The head of the next block bounds our forward scan; NULL_RTX when
     BB is the last block before the exit block.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* A barrier belongs to the jump ending BB; keep it with BB.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      /* Block-end notes should not exist at this point.  */
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Other notes are skipped but not claimed for BB.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by its jump table data trails
	     the block ending in the corresponding tablejump.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    /* Move any other note past the insns kept with BB.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
159
160/* Locate or create a label for a given basic block.  */
161
162static rtx
163label_for_bb (basic_block bb)
164{
165  rtx label = BB_HEAD (bb);
166
167  if (!LABEL_P (label))
168    {
169      if (dump_file)
170	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
171
172      label = block_label (bb);
173    }
174
175  return label;
176}
177
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip leading notes before the first basic-block note; they form the
     function header.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's end and BB's head become
	 BB's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->il.rtl->header = unlink_insn_chain (next_insn,
					      PREV_INSN (BB_HEAD (bb)));
      /* Trailing insns associated with BB (barriers etc.) become BB's
	 footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
221
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs array contains locators where the given property change.  The
   block_locators_blocks contains the scope block that is used for all insn
   locator greater than corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
/* Locators assigned to the prologue and epilogue insns; 0 until set.  */
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location, last_location;
static tree curr_block, last_block;
/* Most recently assigned locator value; -1 while locators are not
   initialized (see insn_locators_alloc).  */
static int curr_rtl_loc = -1;
244
245/* Allocate insn locator datastructure.  */
246void
247insn_locators_alloc (void)
248{
249  prologue_locator = epilogue_locator = 0;
250
251  block_locators_locs = VEC_alloc (int, heap, 32);
252  block_locators_blocks = VEC_alloc (tree, gc, 32);
253  locations_locators_locs = VEC_alloc (int, heap, 32);
254  locations_locators_vals = VEC_alloc (location_t, heap, 32);
255
256  last_location = -1;
257  curr_location = -1;
258  curr_block = NULL;
259  last_block = NULL;
260  curr_rtl_loc = 0;
261}
262
/* At the end of emit stage, clear current location.  Records the
   epilogue locator (if locators are active) and disables further
   locator recording by setting curr_rtl_loc to -1.  */
void
insn_locators_finalize (void)
{
  if (curr_rtl_loc >= 0)
    epilogue_locator = curr_insn_locator ();
  curr_rtl_loc = -1;
}
271
/* Free the insn locator datastructures.  (The original comment wrongly
   said "Allocate".)  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree,gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}
283
284
285/* Set current location.  */
286void
287set_curr_insn_source_location (location_t location)
288{
289  /* IV opts calls into RTL expansion to compute costs of operations.  At this
290     time locators are not initialized.  */
291  if (curr_rtl_loc == -1)
292    return;
293  curr_location = location;
294}
295
/* Return the source location currently in effect for emitted insns.  */
location_t
get_curr_insn_source_location (void)
{
  return curr_location;
}
302
303/* Set current scope block.  */
304void
305set_curr_insn_block (tree b)
306{
307  /* IV opts calls into RTL expansion to compute costs of operations.  At this
308     time locators are not initialized.  */
309  if (curr_rtl_loc == -1)
310    return;
311  if (b)
312    curr_block = b;
313}
314
/* Return the lexical scope block currently in effect for emitted insns.  */
tree
get_curr_insn_block (void)
{
  return curr_block;
}
321
/* Return current insn locator, lazily extending the locator tables when
   the current block or location changed since the last call.  Returns 0
   when locators are disabled.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1)
    return 0;
  /* Scope block changed: start a new locator and record its block.  */
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  /* Likewise for a changed source location.  */
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
344
/* Pass execute function: switch the RTL IL into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
351
/* Pass execute function: leave cfglayout mode.  Each block's aux field
   is first linked to its current physical successor so the existing
   block order is kept by cfg_layout_finalize.  */
static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
365
/* Pass descriptor for entering cfglayout mode.  */
struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
384
/* Pass descriptor for leaving cfglayout mode.  */
struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
403
404/* Return scope resulting from combination of S1 and S2.  */
405static tree
406choose_inner_scope (tree s1, tree s2)
407{
408   if (!s1)
409     return s2;
410   if (!s2)
411     return s1;
412   if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
413     return s1;
414   return s2;
415}
416
/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Find the common ancestor of S1 and S2 by walking the deeper block
     (higher BLOCK_NUMBER) up through BLOCK_SUPERCONTEXT until they meet.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes from S1 up to (but not including) the common ancestor.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes from the common ancestor down to S2.  Emitting each
     BLOCK_BEG note before the previously emitted one yields the notes in
     outermost-first order.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
460
/* Return lexical scope block locator belongs to, or NULL when LOC is 0
   or no blocks were recorded.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  /* Binary search for the largest recorded locator <= LOC; MIN ends up
     indexing the block covering LOC.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
499
/* Return lexical scope block INSN belongs to, via its locator.  */
static tree
insn_scope (const_rtx insn)
{
  return locator_scope (INSN_LOCATOR (insn));
}
506
/* Return the source location of the statement specified by the locator.
   (The original comment incorrectly said "line number".)  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  /* Binary search for the largest recorded locator <= LOC; MIN ends up
     indexing the location covering LOC.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}
531
532/* Return source line of the statement that produced this insn.  */
533int
534locator_line (int loc)
535{
536  expanded_location xloc;
537  if (!loc)
538    return 0;
539  else
540    xloc = expand_location (locator_location (loc));
541  return xloc.line;
542}
543
/* Return line number of the statement that produced this insn, or 0 when
   the insn has no locator.  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}
550
551/* Return source file of the statement specified by LOC.  */
552const char *
553locator_file (int loc)
554{
555  expanded_location xloc;
556  if (!loc)
557    return 0;
558  else
559    xloc = expand_location (locator_location (loc));
560  return xloc.file;
561}
562
/* Return source file of the statement that produced this insn, or a null
   pointer when the insn has no locator.  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
569
570/* Return true if LOC1 and LOC2 locators have the same location and scope.  */
571bool
572locator_eq (int loc1, int loc2)
573{
574  if (loc1 == loc2)
575    return true;
576  if (locator_location (loc1) != locator_location (loc2))
577    return false;
578  return locator_scope (loc1) == locator_scope (loc2);
579}
580
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  /* Start from the function's outermost scope.  */
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					 insn_scope (XVECEXP (body, 0, i)));
	}
      /* Insns with no recorded scope keep the current scope.  */
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Emit a throw-away
     note so the final scopes are closed before the end of the chain.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
631
632
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb following the aux
     chain that starts after the entry block.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
	bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
701
702
/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  /* Put the function header back first, remembering the last insn so we
     can append the blocks to it.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      /* Splice in the block's header insns (if any), then the block's own
	 insns, then its footer insns (if any).  */
      if (bb->il.rtl->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->il.rtl->header;
	  else
	    set_first_insn (bb->il.rtl->header);
	  PREV_INSN (bb->il.rtl->header) = insn;
	  insn = bb->il.rtl->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
	{
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  bb->il.rtl->footer = emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  /* Invert the jump if the branch is predicted not taken,
		     so the more likely successor becomes the fallthru.  */
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		      gcc_assert (could_fall_through
				  (e_taken->src, e_taken->dest));
#endif
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		  gcc_assert (could_fall_through
			      (e_taken->src, e_taken->dest));
#endif
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  nb->il.rtl->visited = 1;
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-through across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
	    {
	      basic_block nb;
	      rtx end;

	      /* Scan the source block backwards for an insn that already
		 carries the edge's locus.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		continue;
	      /* A simplejump with no locator may simply adopt the edge's.  */
	      if (simplejump_p (BB_END (e->src))
		  && INSN_LOCATOR (BB_END (e->src)) == 0)
		{
		  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      /* Also accept a matching locus at the start of the
		 destination block.  */
	      if (e->dest != EXIT_BLOCK_PTR)
		{
		  insn = BB_HEAD (e->dest);
		  end = NEXT_INSN (BB_END (e->dest));
		  while (insn != end && !INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_LOCATOR (insn)
		      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		    continue;
		}
	      /* Otherwise split the edge and give the new block (or a nop
		 inside it) the edge's locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATOR (BB_END (nb)) = e->goto_locus;
	    }
      }
}
986
987/* Perform sanity checks on the insn chain.
988   1. Check that next/prev pointers are consistent in both the forward and
989      reverse direction.
990   2. Count insns in chain, going both directions, and check if equal.
991   3. Check that get_last_insn () returns the actual end of chain.  */
992
993void
994verify_insn_chain (void)
995{
996  rtx x, prevx, nextx;
997  int insn_cnt1, insn_cnt2;
998
999  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
1000       x != 0;
1001       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
1002    gcc_assert (PREV_INSN (x) == prevx);
1003
1004  gcc_assert (prevx == get_last_insn ());
1005
1006  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
1007       x != 0;
1008       nextx = x, insn_cnt2++, x = PREV_INSN (x))
1009    gcc_assert (NEXT_INSN (x) == nextx);
1010
1011  gcc_assert (insn_cnt1 == insn_cnt2);
1012}
1013
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  /* BB->aux non-NULL means BB is not already last in the aux chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  bb->il.rtl->footer = c->il.rtl->footer;
	  c->il.rtl->footer = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... find the current tail of the chain ...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ... and re-append BB as the last block.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
1059
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Look for a second fallthru predecessor of the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	/* Redirecting removes E from the iterated vector, so don't
	   advance the iterator in this case.  */
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
1109
1110/* Return true in case it is possible to duplicate the basic block BB.  */
1111
1112/* We do not want to declare the function in a header file, since it should
1113   only be used through the cfghooks interface, and we do not want to move
1114   it to cfgrtl.c since it would require also moving quite a lot of related
1115   code.  */
1116extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1117
1118bool
1119cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1120{
1121  /* Do not attempt to duplicate tablejumps, as we need to unshare
1122     the dispatch table.  This is difficult to do, as the instructions
1123     computing jump destination may be hoisted outside the basic block.  */
1124  if (tablejump_p (BB_END (bb), NULL, NULL))
1125    return false;
1126
1127  /* Do not duplicate blocks containing insns that can't be copied.  */
1128  if (targetm.cannot_copy_insn_p)
1129    {
1130      rtx insn = BB_HEAD (bb);
1131      while (1)
1132	{
1133	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1134	    return false;
1135	  if (insn == BB_END (bb))
1136	    break;
1137	  insn = NEXT_INSN (insn);
1138	}
1139    }
1140
1141  return true;
1142}
1143
/* Copy the insns FROM up to and including TO, emitting the copies at
   the end of the current insn stream; return the first copied insn.
   CODE_LABELs and dispatch tables are deliberately not copied, and
   most notes are stripped.  */
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      /* Avoid copying following barrier as well if any
		 (and debug insns in between).  */
	      rtx next;

	      for (next = NEXT_INSN (insn);
		   next != NEXT_INSN (to);
		   next = NEXT_INSN (next))
		if (!DEBUG_INSN_P (next))
		  break;
	      /* Advance INSN past the barrier so the outer loop skips
		 it too.  */
	      if (next != NEXT_INSN (to) && BARRIER_P (next))
		insn = next;
	      break;
	    }
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
          maybe_copy_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  /* Labels are intentionally not copied.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      /* These notes must survive in the copy.  */
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Discard the scratch NOTE_INSN_DELETED emitted above; its successor
     is the first insn of the copied sequence.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
1226/* Create a duplicate of the basic block BB.  */
1227
1228/* We do not want to declare the function in a header file, since it should
1229   only be used through the cfghooks interface, and we do not want to move
1230   it to cfgrtl.c since it would require also moving quite a lot of related
1231   code.  */
1232extern basic_block cfg_layout_duplicate_bb (basic_block);
1233
1234basic_block
1235cfg_layout_duplicate_bb (basic_block bb)
1236{
1237  rtx insn;
1238  basic_block new_bb;
1239
1240  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1241  new_bb = create_basic_block (insn,
1242			       insn ? get_last_insn () : NULL,
1243			       EXIT_BLOCK_PTR->prev_bb);
1244
1245  BB_COPY_PARTITION (new_bb, bb);
1246  if (bb->il.rtl->header)
1247    {
1248      insn = bb->il.rtl->header;
1249      while (NEXT_INSN (insn))
1250	insn = NEXT_INSN (insn);
1251      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1252      if (insn)
1253	new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1254    }
1255
1256  if (bb->il.rtl->footer)
1257    {
1258      insn = bb->il.rtl->footer;
1259      while (NEXT_INSN (insn))
1260	insn = NEXT_INSN (insn);
1261      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1262      if (insn)
1263	new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1264    }
1265
1266  return new_bb;
1267}
1268
1269
1270/* Main entry point to this module - initialize the datastructures for
1271   CFG layout changes.  It keeps LOOPS up-to-date if not null.
1272
1273   FLAGS is a set of additional flags to pass to cleanup_cfg().  */
1274
1275void
1276cfg_layout_initialize (unsigned int flags)
1277{
1278  rtx x;
1279  basic_block bb;
1280
1281  initialize_original_copy_tables ();
1282
1283  cfg_layout_rtl_register_cfg_hooks ();
1284
1285  record_effective_endpoints ();
1286
1287  /* Make sure that the targets of non local gotos are marked.  */
1288  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1289    {
1290      bb = BLOCK_FOR_INSN (XEXP (x, 0));
1291      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1292    }
1293
1294  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1295}
1296
1297/* Splits superblocks.  */
1298void
1299break_superblocks (void)
1300{
1301  sbitmap superblocks;
1302  bool need = false;
1303  basic_block bb;
1304
1305  superblocks = sbitmap_alloc (last_basic_block);
1306  sbitmap_zero (superblocks);
1307
1308  FOR_EACH_BB (bb)
1309    if (bb->flags & BB_SUPERBLOCK)
1310      {
1311	bb->flags &= ~BB_SUPERBLOCK;
1312	SET_BIT (superblocks, bb->index);
1313	need = true;
1314      }
1315
1316  if (need)
1317    {
1318      rebuild_jump_labels (get_insns ());
1319      find_many_sub_basic_blocks (superblocks);
1320    }
1321
1322  free (superblocks);
1323}
1324
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Ensure at most one predecessor of EXIT falls through to it.  */
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  /* After reload, and only when the target has no epilogue, the
     fallthru exit predecessor must become the last block in the chain;
     see fixup_fallthru_exit_predecessor.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  /* The insn chain was rewritten above; recompute label reference
     counts and drop jump tables whose jumps are gone.  */
  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
1352
1353/* Checks whether all N blocks in BBS array can be copied.  */
1354bool
1355can_copy_bbs_p (basic_block *bbs, unsigned n)
1356{
1357  unsigned i;
1358  edge e;
1359  int ret = true;
1360
1361  for (i = 0; i < n; i++)
1362    bbs[i]->flags |= BB_DUPLICATED;
1363
1364  for (i = 0; i < n; i++)
1365    {
1366      /* In case we should redirect abnormal edge during duplication, fail.  */
1367      edge_iterator ei;
1368      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1369	if ((e->flags & EDGE_ABNORMAL)
1370	    && (e->dest->flags & BB_DUPLICATED))
1371	  {
1372	    ret = false;
1373	    goto end;
1374	  }
1375
1376      if (!can_duplicate_block_p (bbs[i]))
1377	{
1378	  ret = false;
1379	  break;
1380	}
1381    }
1382
1383end:
1384  for (i = 0; i < n; i++)
1385    bbs[i]->flags &= ~BB_DUPLICATED;
1386
1387  return ret;
1388}
1389
1390/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
1391   are placed into array NEW_BBS in the same order.  Edges from basic blocks
1392   in BBS are also duplicated and copies of those of them
1393   that lead into BBS are redirected to appropriate newly created block.  The
1394   function assigns bbs into loops (copy of basic block bb is assigned to
1395   bb->loop_father->copy loop, so this must be set up correctly in advance)
1396   and updates dominators locally (LOOPS structure that contains the information
1397   about dominators is passed to enable this).
1398
1399   BASE is the superloop to that basic block belongs; if its header or latch
1400   is copied, we do not set the new blocks as header or latch.
1401
1402   Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1403   also in the same order.
1404
1405   Newly created basic blocks are put after the basic block AFTER in the
1406   instruction stream, and the order of the blocks in BBS array is preserved.  */
1407
1408void
1409copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1410	  edge *edges, unsigned num_edges, edge *new_edges,
1411	  struct loop *base, basic_block after)
1412{
1413  unsigned i, j;
1414  basic_block bb, new_bb, dom_bb;
1415  edge e;
1416
1417  /* Duplicate bbs, update dominators, assign bbs to loops.  */
1418  for (i = 0; i < n; i++)
1419    {
1420      /* Duplicate.  */
1421      bb = bbs[i];
1422      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1423      after = new_bb;
1424      bb->flags |= BB_DUPLICATED;
1425      /* Possibly set loop header.  */
1426      if (bb->loop_father->header == bb && bb->loop_father != base)
1427	new_bb->loop_father->header = new_bb;
1428      /* Or latch.  */
1429      if (bb->loop_father->latch == bb && bb->loop_father != base)
1430	new_bb->loop_father->latch = new_bb;
1431    }
1432
1433  /* Set dominators.  */
1434  for (i = 0; i < n; i++)
1435    {
1436      bb = bbs[i];
1437      new_bb = new_bbs[i];
1438
1439      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1440      if (dom_bb->flags & BB_DUPLICATED)
1441	{
1442	  dom_bb = get_bb_copy (dom_bb);
1443	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1444	}
1445    }
1446
1447  /* Redirect edges.  */
1448  for (j = 0; j < num_edges; j++)
1449    new_edges[j] = NULL;
1450  for (i = 0; i < n; i++)
1451    {
1452      edge_iterator ei;
1453      new_bb = new_bbs[i];
1454      bb = bbs[i];
1455
1456      FOR_EACH_EDGE (e, ei, new_bb->succs)
1457	{
1458	  for (j = 0; j < num_edges; j++)
1459	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1460	      new_edges[j] = e;
1461
1462	  if (!(e->dest->flags & BB_DUPLICATED))
1463	    continue;
1464	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1465	}
1466    }
1467
1468  /* Clear information about duplicates.  */
1469  for (i = 0; i < n; i++)
1470    bbs[i]->flags &= ~BB_DUPLICATED;
1471}
1472
1473#include "gt-cfglayout.h"
1474