/* cfganal.c revision 107590 */
1/* Control flow graph analysis code for GNU compiler.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22/* This file contains various simple utilities to analyze the CFG.  */
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "hard-reg-set.h"
27#include "basic-block.h"
28#include "insn-config.h"
29#include "recog.h"
30#include "toplev.h"
31#include "obstack.h"
32#include "tm_p.h"
33
/* Store the data structures necessary for depth-first search.
   Used by connect_infinite_loops_to_exit and the
   flow_dfs_compute_reverse_* helpers below.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
/* Clients pass around a pointer to the (opaque) state structure.  */
typedef struct depth_first_search_dsS *depth_first_search_ds;
47
/* Forward declarations for the static helpers defined later in this file.  */
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors	PARAMS ((basic_block));
static bool need_fake_edge_p		PARAMS ((rtx));
static bool keep_with_call_p		PARAMS ((rtx));
static bool flow_active_insn_p		PARAMS ((rtx));
60
/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (insn)
     rtx insn;
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime aborts.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}
83
84/* Return true if the block has no effect and only forwards control flow to
85   its single destination.  */
86
87bool
88forwarder_block_p (bb)
89     basic_block bb;
90{
91  rtx insn;
92
93  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
94      || !bb->succ || bb->succ->succ_next)
95    return false;
96
97  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
98    if (INSN_P (insn) && flow_active_insn_p (insn))
99      return false;
100
101  return (!INSN_P (insn)
102	  || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
103	  || !flow_active_insn_p (insn));
104}
105
106/* Return nonzero if we can reach target from src by falling through.  */
107
108bool
109can_fallthru (src, target)
110     basic_block src, target;
111{
112  rtx insn = src->end;
113  rtx insn2 = target->head;
114
115  if (src->index + 1 == target->index && !active_insn_p (insn2))
116    insn2 = next_active_insn (insn2);
117
118  /* ??? Later we may add code to move jump tables offline.  */
119  return next_active_insn (insn) == insn2;
120}
121
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.

   An edge is a back edge when its destination's preorder number is no
   greater than its source's and the destination has no postorder number
   yet, i.e. the destination is still on the current DFS spine.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;			/* explicit DFS stack of edges */
  int *pre;			/* preorder number per block; 0 = unnumbered */
  int *post;			/* postorder number per block; 0 = unnumbered */
  int sp;			/* stack pointer: stack[0..sp-1] are live */
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was already visited (or is the exit): classify the edge.
	     A back edge goes to an ancestor, i.e. a block with a smaller
	     or equal preorder number and no postorder number yet.  */
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  /* All of SRC's successors are done; assign its postorder number.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
213
214/* Return true if we need to add fake edge to exit.
215   Helper function for the flow_call_edges_add.  */
216
217static bool
218need_fake_edge_p (insn)
219     rtx insn;
220{
221  if (!INSN_P (insn))
222    return false;
223
224  if ((GET_CODE (insn) == CALL_INSN
225       && !SIBLING_CALL_P (insn)
226       && !find_reg_note (insn, REG_NORETURN, NULL)
227       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
228       && !CONST_OR_PURE_CALL_P (insn)))
229    return true;
230
231  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
232	   && MEM_VOLATILE_P (PATTERN (insn)))
233	  || (GET_CODE (PATTERN (insn)) == PARALLEL
234	      && asm_noperands (insn) != -1
235	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
236	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
237}
238
239/* Return true if INSN should be kept in the same block as a preceding call.
240   This is done for a single-set whose destination is a fixed register or
241   whose source is the function return value.  This is a helper function for
242   flow_call_edges_add.  */
243
244static bool
245keep_with_call_p (insn)
246     rtx insn;
247{
248  rtx set;
249
250  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
251    {
252      if (GET_CODE (SET_DEST (set)) == REG
253	  && fixed_regs[REGNO (SET_DEST (set))]
254	  && general_operand (SET_SRC (set), VOIDmode))
255	return true;
256      if (GET_CODE (SET_SRC (set)) == REG
257	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
258	  && GET_CODE (SET_DEST (set)) == REG
259	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
260	return true;
261    }
262  return false;
263}
264
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;		/* return value: number of blocks split */
  int bb_num = 0;		/* number of entries filled in BBS */
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      /* No bitmap given: process every basic block.  */
      for (i = 0; i < n_basic_blocks; i++)
	bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }
  else
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
			       {
				 bbs[bb_num++] = BASIC_BLOCK (i);
				 if (i == n_basic_blocks - 1)
				   check_last_block = true;
			       });

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = BASIC_BLOCK (n_basic_blocks - 1);
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Find the existing fallthru edge to the exit block; inserting
	     a USE on it forces a new final block to be created.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == EXIT_BLOCK_PTR)
	      break;

	  insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
	  commit_edge_insertions ();
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      /* Scan the block's insns backwards; PREV_INSN is sampled before
	 any split so the walk is unaffected by renumbering.  */
      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
	         remain in the same block as the call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		while (split_at_insn != bb->end
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
	         should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == bb->end)
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      e = split_block (bb, split_at_insn);
	      if (e)
		blocks_split++;

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == bb->head)
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}
395
396/* Find unreachable blocks.  An unreachable block will have 0 in
397   the reachable bit in block->flags.  A non-zero value indicates the
398   block is reachable.  */
399
400void
401find_unreachable_blocks ()
402{
403  edge e;
404  int i, n;
405  basic_block *tos, *worklist;
406
407  n = n_basic_blocks;
408  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
409
410  /* Clear all the reachability flags.  */
411
412  for (i = 0; i < n; ++i)
413    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
414
415  /* Add our starting points to the worklist.  Almost always there will
416     be only one.  It isn't inconceivable that we might one day directly
417     support Fortran alternate entry points.  */
418
419  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
420    {
421      *tos++ = e->dest;
422
423      /* Mark the block reachable.  */
424      e->dest->flags |= BB_REACHABLE;
425    }
426
427  /* Iterate: find everything reachable from what we've already seen.  */
428
429  while (tos != worklist)
430    {
431      basic_block b = *--tos;
432
433      for (e = b->succ; e; e = e->succ_next)
434	if (!(e->dest->flags & BB_REACHABLE))
435	  {
436	    *tos++ = e->dest;
437	    e->dest->flags |= BB_REACHABLE;
438	  }
439    }
440
441  free (worklist);
442}
443
444/* Functions to access an edge list with a vector representation.
445   Enough data is kept such that given an index number, the
446   pred and succ that edge represents can be determined, or
447   given a pred and a succ, its index number can be returned.
448   This allows algorithms which consume a lot of memory to
449   represent the normally full matrix of edge (pred,succ) with a
450   single indexed vector,  edge (EDGE_INDEX (pred, succ)), with no
451   wasted space in the client code due to sparse flow graphs.  */
452
453/* This functions initializes the edge list. Basically the entire
454   flowgraph is processed, and all edges are assigned a number,
455   and the data structure is filled in.  */
456
457struct edge_list *
458create_edge_list ()
459{
460  struct edge_list *elist;
461  edge e;
462  int num_edges;
463  int x;
464  int block_count;
465
466  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */
467
468  num_edges = 0;
469
470  /* Determine the number of edges in the flow graph by counting successor
471     edges on each basic block.  */
472  for (x = 0; x < n_basic_blocks; x++)
473    {
474      basic_block bb = BASIC_BLOCK (x);
475
476      for (e = bb->succ; e; e = e->succ_next)
477	num_edges++;
478    }
479
480  /* Don't forget successors of the entry block.  */
481  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
482    num_edges++;
483
484  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
485  elist->num_blocks = block_count;
486  elist->num_edges = num_edges;
487  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
488
489  num_edges = 0;
490
491  /* Follow successors of the entry block, and register these edges.  */
492  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
493    elist->index_to_edge[num_edges++] = e;
494
495  for (x = 0; x < n_basic_blocks; x++)
496    {
497      basic_block bb = BASIC_BLOCK (x);
498
499      /* Follow all successors of blocks, and register these edges.  */
500      for (e = bb->succ; e; e = e->succ_next)
501	elist->index_to_edge[num_edges++] = e;
502    }
503
504  return elist;
505}
506
507/* This function free's memory associated with an edge list.  */
508
509void
510free_edge_list (elist)
511     struct edge_list *elist;
512{
513  if (elist)
514    {
515      free (elist->index_to_edge);
516      free (elist);
517    }
518}
519
520/* This function provides debug output showing an edge list.  */
521
522void
523print_edge_list (f, elist)
524     FILE *f;
525     struct edge_list *elist;
526{
527  int x;
528
529  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
530	   elist->num_blocks - 2, elist->num_edges);
531
532  for (x = 0; x < elist->num_edges; x++)
533    {
534      fprintf (f, " %-4d - edge(", x);
535      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
536	fprintf (f, "entry,");
537      else
538	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
539
540      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
541	fprintf (f, "exit)\n");
542      else
543	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
544    }
545}
546
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  Diagnostics are written to F; nothing is repaired.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  /* First direction: every edge in the CFG must appear in ELIST with
     a consistent (pred, succ) pair.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* The entry block's successors are not covered by the loop above.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  Check every (pred, succ)
     pair of ordinary blocks against both the succ and pred chains.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }

  /* Same spurious-edge check for edges leaving the entry block.  */
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }

  /* And for edges entering the exit block.  */
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
699
700/* This routine will determine what, if any, edge there is between
701   a specified predecessor and successor.  */
702
703int
704find_edge_index (edge_list, pred, succ)
705     struct edge_list *edge_list;
706     basic_block pred, succ;
707{
708  int x;
709
710  for (x = 0; x < NUM_EDGES (edge_list); x++)
711    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
712	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
713      return x;
714
715  return (EDGE_INDEX_NO_EDGE);
716}
717
718/* Dump the list of basic blocks in the bitmap NODES.  */
719
720void
721flow_nodes_print (str, nodes, file)
722     const char *str;
723     const sbitmap nodes;
724     FILE *file;
725{
726  int node;
727
728  if (! nodes)
729    return;
730
731  fprintf (file, "%s { ", str);
732  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
733  fputs ("}\n", file);
734}
735
736/* Dump the list of edges in the array EDGE_LIST.  */
737
738void
739flow_edge_list_print (str, edge_list, num_edges, file)
740     const char *str;
741     const edge *edge_list;
742     int num_edges;
743     FILE *file;
744{
745  int i;
746
747  if (! edge_list)
748    return;
749
750  fprintf (file, "%s { ", str);
751  for (i = 0; i < num_edges; i++)
752    fprintf (file, "%d->%d ", edge_list[i]->src->index,
753	     edge_list[i]->dest->index);
754
755  fputs ("}\n", file);
756}
757
758
759/* This routine will remove any fake successor edges for a basic block.
760   When the edge is removed, it is also removed from whatever predecessor
761   list it is in.  */
762
763static void
764remove_fake_successors (bb)
765     basic_block bb;
766{
767  edge e;
768
769  for (e = bb->succ; e;)
770    {
771      edge tmp = e;
772
773      e = e->succ_next;
774      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
775	remove_edge (tmp);
776    }
777}
778
779/* This routine will remove all fake edges from the flow graph.  If
780   we remove all fake successors, it will automatically remove all
781   fake predecessors.  */
782
783void
784remove_fake_edges ()
785{
786  int x;
787
788  for (x = 0; x < n_basic_blocks; x++)
789    remove_fake_successors (BASIC_BLOCK (x));
790
791  /* We've handled all successors except the entry block's.  */
792  remove_fake_successors (ENTRY_BLOCK_PTR);
793}
794
795/* This function will add a fake edge between any block which has no
796   successors, and the exit block. Some data flow equations require these
797   edges to exist.  */
798
799void
800add_noreturn_fake_exit_edges ()
801{
802  int x;
803
804  for (x = 0; x < n_basic_blocks; x++)
805    if (BASIC_BLOCK (x)->succ == NULL)
806      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
807}
808
809/* This function adds a fake edge between any infinite loops to the
810   exit block.  Some optimizations require a path from each node to
811   the exit node.
812
813   See also Morgan, Figure 3.10, pp. 82-83.
814
815   The current implementation is ugly, not attempting to minimize the
816   number of inserted fake edges.  To reduce the number of fake edges
817   to insert, add fake edges from _innermost_ loops containing only
818   nodes not reachable from the exit block.  */
819
820void
821connect_infinite_loops_to_exit ()
822{
823  basic_block unvisited_block;
824  struct depth_first_search_dsS dfs_ds;
825
826  /* Perform depth-first search in the reverse graph to find nodes
827     reachable from the exit block.  */
828  flow_dfs_compute_reverse_init (&dfs_ds);
829  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
830
831  /* Repeatedly add fake edges, updating the unreachable nodes.  */
832  while (1)
833    {
834      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
835      if (!unvisited_block)
836	break;
837
838      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
839      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
840    }
841
842  flow_dfs_compute_reverse_finish (&dfs_ds);
843  return;
844}
845
/* Compute reverse top sort order, i.e. a postorder numbering of the
   blocks reachable from the entry block, stored into RTS_ORDER (which
   the caller must have sized to at least n_basic_blocks entries).
   Unreachable blocks are not numbered.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;			/* explicit DFS stack of edges */
  int sp;			/* stack pointer: stack[0..sp-1] are live */
  int postnum = 0;		/* next postorder slot in RTS_ORDER */
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else
	    /* A leaf: it is finished immediately.  */
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  /* All of SRC's successors have been explored: number it.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	   rts_order[postnum++] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}
909
/* Compute the depth first search order and store in the array
  DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
  RC_ORDER is non-zero, return the reverse completion number for each
  node.  Returns the number of nodes visited.  A depth first search
  tries to get as far away from the starting point as quickly as
  possible.

  Aborts if any node is unreachable, i.e. the caller must have made
  the whole CFG reachable (e.g. by deleting unreachable blocks)
  before calling this.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;			/* explicit DFS stack of edges */
  int sp;			/* stack pointer: stack[0..sp-1] are live */
  int dfsnum = 0;		/* next preorder (DFS) number */
  int rcnum = n_basic_blocks - 1;	/* RC_ORDER is filled back to front */
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum] = dest->index;

	  dfsnum++;

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR
	      && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
1001
/* A node of the depth-first spanning tree built by
   flow_preorder_transversal_compute.  One node per basic block.  */
struct dfst_node
{
    /* Number of children currently stored in NODE.  */
    unsigned nnodes;
    /* Array of pointers to the children of this node.  */
    struct dfst_node **node;
    /* Parent in the spanning tree, or NULL for the root.  */
    struct dfst_node *up;
};
1008
/* Compute a preorder transversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;			/* explicit DFS stack of edges */
  int i;
  int max_successors;
  int sp;			/* stack pointer: stack[0..sp-1] are live */
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;	/* spanning tree, one node per block */

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
				       sizeof (struct dfst_node));

  /* Size each node's child array by its block's successor count, the
     maximum number of tree children it can ever have.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	max_successors++;

      dfst[i].node
	= (max_successors
	   ? (struct dfst_node **) xcalloc (max_successors,
					    sizeof (struct dfst_node *))
	   : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  Note this walk is
     destructive: it consumes each node's NNODES count.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
	{
	  /* Descend into the rightmost unwalked child.  */
	  node = node->node[--node->nnodes];
	  pot_order[i++] = node - dfst;
	}
      else
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
1128
/* Perform a depth-first search on the _reverse_ graph (i.e. by
   following predecessor edges), marking each basic block reached in
   the search context's visited-blocks bitmap.
1132
1133   The computation is split into three pieces:
1134
1135   flow_dfs_compute_reverse_init () creates the necessary data
1136   structures.
1137
1138   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1139   structures.  The block will start the search.
1140
1141   flow_dfs_compute_reverse_execute () continues (or starts) the
1142   search using the block on the top of the stack, stopping when the
1143   stack is empty.
1144
1145   flow_dfs_compute_reverse_finish () destroys the necessary data
1146   structures.
1147
1148   Thus, the user will probably call ..._init(), call ..._add_bb() to
1149   add a beginning basic block to the stack, call ..._execute(),
1150   possibly add another bb to the stack and again call ..._execute(),
1151   ..., and finally call _finish().  */
1152
1153/* Initialize the data structures used for depth-first search on the
1154   reverse graph.  If INITIALIZE_STACK is nonzero, the exit block is
1155   added to the basic block stack.  DATA is the current depth-first
1156   search context.  If INITIALIZE_STACK is non-zero, there is an
1157   element on the stack.  */
1158
1159static void
1160flow_dfs_compute_reverse_init (data)
1161     depth_first_search_ds data;
1162{
1163  /* Allocate stack for back-tracking up CFG.  */
1164  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1165					 * sizeof (basic_block));
1166  data->sp = 0;
1167
1168  /* Allocate bitmap to track nodes that have been visited.  */
1169  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
1170
1171  /* None of the nodes in the CFG have been visited yet.  */
1172  sbitmap_zero (data->visited_blocks);
1173
1174  return;
1175}
1176
1177/* Add the specified basic block to the top of the dfs data
1178   structures.  When the search continues, it will start at the
1179   block.  */
1180
1181static void
1182flow_dfs_compute_reverse_add_bb (data, bb)
1183     depth_first_search_ds data;
1184     basic_block bb;
1185{
1186  data->stack[data->sp++] = bb;
1187  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1188}
1189
1190/* Continue the depth-first search through the reverse graph starting with the
1191   block at the stack's top and ending when the stack is empty.  Visited nodes
1192   are marked.  Returns an unvisited basic block, or NULL if there is none
1193   available.  */
1194
1195static basic_block
1196flow_dfs_compute_reverse_execute (data)
1197     depth_first_search_ds data;
1198{
1199  basic_block bb;
1200  edge e;
1201  int i;
1202
1203  while (data->sp > 0)
1204    {
1205      bb = data->stack[--data->sp];
1206
1207      /* Perform depth-first search on adjacent vertices.  */
1208      for (e = bb->pred; e; e = e->pred_next)
1209	if (!TEST_BIT (data->visited_blocks,
1210		       e->src->index - (INVALID_BLOCK + 1)))
1211	  flow_dfs_compute_reverse_add_bb (data, e->src);
1212    }
1213
1214  /* Determine if there are unvisited basic blocks.  */
1215  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
1216    if (!TEST_BIT (data->visited_blocks, i))
1217      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
1218
1219  return NULL;
1220}
1221
1222/* Destroy the data structures needed for depth-first search on the
1223   reverse graph.  */
1224
1225static void
1226flow_dfs_compute_reverse_finish (data)
1227     depth_first_search_ds data;
1228{
1229  free (data->stack);
1230  sbitmap_free (data->visited_blocks);
1231}
1232