cfganal.c revision 90075
/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "toplev.h"

#include "obstack.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack for backtracking during the algorithm.  */
  basic_block *stack;

  /* Number of blocks on the stack.  That is, positions 0, ..., sp-1
     have blocks.  */
  unsigned int sp;

  /* Record of basic blocks already seen by depth-first search.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors     PARAMS ((basic_block));
static bool need_fake_edge_p            PARAMS ((rtx));

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      return false;

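  /* The final insn must be either inactive or a simple unconditional
     jump for the block to merely forward control flow.  */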
  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !active_insn_p (insn));
}

/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;
          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
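          /* E is a back edge when its destination is an ancestor of its
             source in the DFS tree: the destination was visited no later
             than the source (pre[src] >= pre[dest]) and its traversal has
             not been completed yet (post[dest] == 0).  */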
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

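  /* A volatile asm (or a plain ASM_INPUT) may also fail to fall through,
     e.g. by trapping, so it gets a fake edge to the exit block as well.  */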
  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, and for volatile inline assembly, in the bitmap of blocks specified
   by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
        bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }
  else
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
                               {
                                 bbs[bb_num++] = BASIC_BLOCK (i);
                                 if (i == n_basic_blocks - 1)
                                   check_last_block = true;
                               });

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block
      && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
    {
       edge e;

       for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
         if (e->dest == EXIT_BLOCK_PTR)
            break;

       insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
       commit_edge_insertions ();
    }

  /* Now add fake edges to the function exit for any non-constant
     calls, since there is no way to determine whether they will
     return or not.  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

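      /* Scan the insns of BB from last to first, splitting the block after
         each insn that needs a fake edge to the exit block.  */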
      for (insn = bb->end; ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;

              /* The above condition should be enough to verify that there is
                 no edge to the exit block in the CFG already.  Calling
                 make_edge in such a case would make us mark that edge as
                 fake and remove it later.  */

#ifdef ENABLE_CHECKING
              if (insn == bb->end)
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              e = split_block (bb, insn);
              if (e)
                blocks_split++;

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == bb->head)
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}

/* Find unreachable blocks.  An unreachable block will have its
   BB_REACHABLE flag cleared in block->flags; a set flag indicates
   the block is reachable.  */

void
find_unreachable_blocks ()
{
  edge e;
  int i, n;
  basic_block *tos, *worklist;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Clear all the reachability flags.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}

/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the pred
   and succ that the edge represents can be determined, or given a
   pred and a succ, the edge's index number can be returned.  This
   allows algorithms which consume a lot of memory to represent the
   normally full matrix of edges (pred, succ) with a single indexed
   vector, edge (EDGE_INDEX (pred, succ)), with no space wasted in
   the client code due to sparse flow graphs.  */

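/* A minimal usage sketch (illustrative only; the loop body stands for
   whatever per-edge work a client performs):

       struct edge_list *elist = create_edge_list ();
       int i;

       for (i = 0; i < NUM_EDGES (elist); i++)
         {
           basic_block pred = INDEX_EDGE_PRED_BB (elist, i);
           basic_block succ = INDEX_EDGE_SUCC_BB (elist, i);
           ... record something about the edge (pred, succ) ...
         }

       free_edge_list (elist);  */
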
/* This function initializes the edge list.  The entire flowgraph is
   processed: all edges are assigned a number, and the data structure
   is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }

  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    elist->index_to_edge[num_edges++] = e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
        elist->index_to_edge[num_edges++] = e;
    }

  return elist;
}

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

448
449/* This function provides debug output showing an edge list.  */
450
451void
452print_edge_list (f, elist)
453     FILE *f;
454     struct edge_list *elist;
455{
456  int x;
457
458  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
459	   elist->num_blocks - 2, elist->num_edges);
460
461  for (x = 0; x < elist->num_edges; x++)
462    {
463      fprintf (f, " %-4d - edge(", x);
464      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
465	fprintf (f, "entry,");
466      else
467	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
468
469      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
470	fprintf (f, "exit)\n");
471      else
472	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
473    }
474}
475
476/* This function provides an internal consistency check of an edge list,
477   verifying that all edges are present, and that there are no
478   extra edges.  */
479
480void
481verify_edge_list (f, elist)
482     FILE *f;
483     struct edge_list *elist;
484{
485  int x, pred, succ, index;
486  edge e;
487
488  for (x = 0; x < n_basic_blocks; x++)
489    {
490      basic_block bb = BASIC_BLOCK (x);
491
492      for (e = bb->succ; e; e = e->succ_next)
493	{
494	  pred = e->src->index;
495	  succ = e->dest->index;
496	  index = EDGE_INDEX (elist, e->src, e->dest);
497	  if (index == EDGE_INDEX_NO_EDGE)
498	    {
499	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
500	      continue;
501	    }
502
503	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
504	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
505		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
506	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
507	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
508		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
509	}
510    }
511
512  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
513    {
514      pred = e->src->index;
515      succ = e->dest->index;
516      index = EDGE_INDEX (elist, e->src, e->dest);
517      if (index == EDGE_INDEX_NO_EDGE)
518	{
519	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
520	  continue;
521	}
522
523      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
524	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
525		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
526      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
527	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
528		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
529    }
530
  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
        basic_block p = BASIC_BLOCK (pred);
        basic_block s = BASIC_BLOCK (succ);
        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   pred, succ);
        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                           BASIC_BLOCK (succ)));
      }

  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }

      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
                 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
                 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
                                   BASIC_BLOCK (succ)));
    }

  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }

      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
                 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
                 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                   EXIT_BLOCK_PTR));
    }
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);

  fputs ("}\n", file);
}


/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}

/* This function will add a fake edge from any block which has no
   successors to the exit block.  Some data flow equations require
   these edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}

/* This function adds fake edges from infinite loops to the exit
   block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}

/* Compute the reverse topological sort order, storing the block
   indices in RTS_ORDER.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}

/* Compute the depth first search order and store it in the array
   DFS_ORDER if nonzero.  If RC_ORDER is nonzero, return the reverse
   completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the
   starting point as quickly as possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum] = dest->index;

          dfsnum++;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR
              && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}

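/* A node of the depth first spanning tree built by
   flow_preorder_transversal_compute: NODE holds the NNODES children
   added so far, and UP points back to the parent.  */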
struct dfst_node
{
    unsigned nnodes;
    struct dfst_node **node;
    struct dfst_node *up;
};

/* Compute a preorder traversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
                                       sizeof (struct dfst_node));

  for (i = 0; i < n_basic_blocks; i++)
    {
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
        max_successors++;

      dfst[i].node
        = (max_successors
           ? (struct dfst_node **) xcalloc (max_successors,
                                            sizeof (struct dfst_node *))
           : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          /* Add the destination to the preorder tree.  */
          if (src != ENTRY_BLOCK_PTR)
            {
              dfst[src->index].node[dfst[src->index].nnodes++]
                = &dfst[dest->index];
              dfst[dest->index].up = &dfst[src->index];
            }

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
        }

      else if (e->succ_next)
        stack[sp - 1] = e->succ_next;
      else
        sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder traversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
        {
          node = node->node[--node->nnodes];
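          /* The offset of the node within the DFST array is the index
             of the corresponding basic block.  */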
          pot_order[i++] = node - dfst;
        }
      else
        node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}

/* Perform a depth-first search on the _reverse_ graph, marking the
   blocks visited along the way.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  connect_infinite_loops_to_exit
   above is an example of this usage pattern.  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context;
   the caller adds the starting block(s) with
   flow_dfs_compute_reverse_add_bb.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
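  /* The stack and the visited-blocks bitmap are sized with an offset of
     - (INVALID_BLOCK + 1) so that the blocks with negative indices (the
     entry and exit blocks) can be recorded as well.  */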
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                                         * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      for (e = bb->pred; e; e = e->pred_next)
        if (!TEST_BIT (data->visited_blocks,
                       e->src->index - (INVALID_BLOCK + 1)))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}
1161