/* cfganal.c revision 110611 */
1/* Control flow graph analysis code for GNU compiler.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22/* This file contains various simple utilities to analyze the CFG.  */
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "hard-reg-set.h"
27#include "basic-block.h"
28#include "insn-config.h"
29#include "recog.h"
30#include "toplev.h"
31#include "obstack.h"
32#include "tm_p.h"
33
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack of basic blocks, used for backtracking during the search.  */
  basic_block *stack;

  /* Number of entries in use on the stack.  That is, positions 0, ...,
     sp-1 hold blocks.  */
  unsigned int sp;

  /* Record of basic blocks already seen by the depth-first search.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
47
/* Forward declarations for the file-local helpers defined below.  */
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors	PARAMS ((basic_block));
static bool need_fake_edge_p		PARAMS ((rtx));
static bool keep_with_call_p		PARAMS ((rtx));
static bool flow_active_insn_p		PARAMS ((rtx));
60
61/* Like active_insn_p, except keep the return value clobber around
62   even after reload.  */
63
64static bool
65flow_active_insn_p (insn)
66     rtx insn;
67{
68  if (active_insn_p (insn))
69    return true;
70
71  /* A clobber of the function return value exists for buggy
72     programs that fail to return a value.  It's effect is to
73     keep the return value from being live across the entire
74     function.  If we allow it to be skipped, we introduce the
75     possibility for register livetime aborts.  */
76  if (GET_CODE (PATTERN (insn)) == CLOBBER
77      && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
78      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
79    return true;
80
81  return false;
82}
83
84/* Return true if the block has no effect and only forwards control flow to
85   its single destination.  */
86
87bool
88forwarder_block_p (bb)
89     basic_block bb;
90{
91  rtx insn;
92
93  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
94      || !bb->succ || bb->succ->succ_next)
95    return false;
96
97  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
98    if (INSN_P (insn) && flow_active_insn_p (insn))
99      return false;
100
101  return (!INSN_P (insn)
102	  || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
103	  || !flow_active_insn_p (insn));
104}
105
106/* Return nonzero if we can reach target from src by falling through.  */
107
108bool
109can_fallthru (src, target)
110     basic_block src, target;
111{
112  rtx insn = src->end;
113  rtx insn2 = target->head;
114
115  if (src->index + 1 == target->index && !active_insn_p (insn2))
116    insn2 = next_active_insn (insn2);
117
118  /* ??? Later we may add code to move jump tables offline.  */
119  return next_active_insn (insn) == insn2;
120}
121
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  The stack holds, for each
     block on the current DFS path, the successor edge being explored.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      /* Clear any stale marking left over from a previous run.  */
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was already visited (or is the exit block).  The edge
	     is a back edge iff DEST is still open on the DFS path:
	     entered no later than SRC (pre number not larger) and not
	     yet finished (post number still 0).  */
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  /* All of SRC's successors processed: record its post number.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  /* Advance to SRC's next successor edge, or backtrack.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
213
214/* Return true if we need to add fake edge to exit.
215   Helper function for the flow_call_edges_add.  */
216
217static bool
218need_fake_edge_p (insn)
219     rtx insn;
220{
221  if (!INSN_P (insn))
222    return false;
223
224  if ((GET_CODE (insn) == CALL_INSN
225       && !SIBLING_CALL_P (insn)
226       && !find_reg_note (insn, REG_NORETURN, NULL)
227       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
228       && !CONST_OR_PURE_CALL_P (insn)))
229    return true;
230
231  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
232	   && MEM_VOLATILE_P (PATTERN (insn)))
233	  || (GET_CODE (PATTERN (insn)) == PARALLEL
234	      && asm_noperands (insn) != -1
235	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
236	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
237}
238
239/* Return true if INSN should be kept in the same block as a preceding call.
240   This is done for a single-set whose destination is a fixed register or
241   whose source is the function return value.  This is a helper function for
242   flow_call_edges_add.  */
243
244static bool
245keep_with_call_p (insn)
246     rtx insn;
247{
248  rtx set;
249
250  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
251    {
252      if (GET_CODE (SET_DEST (set)) == REG
253	  && fixed_regs[REGNO (SET_DEST (set))]
254	  && general_operand (SET_SRC (set), VOIDmode))
255	return true;
256      if (GET_CODE (SET_SRC (set)) == REG
257	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
258	  && GET_CODE (SET_DEST (set)) == REG
259	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
260	return true;
261    }
262  return false;
263}
264
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      /* No bitmap supplied: process every basic block.  */
      for (i = 0; i < n_basic_blocks; i++)
	bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }
  else
    /* Collect only the blocks named in the bitmap.  */
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
			       {
				 bbs[bb_num++] = BASIC_BLOCK (i);
				 if (i == n_basic_blocks - 1)
				   check_last_block = true;
			       });

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = BASIC_BLOCK (n_basic_blocks - 1);
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a dummy USE on the fallthru edge to exit, forcing the
	     call to end up in a block of its own.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == EXIT_BLOCK_PTR)
	      {
		insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
		commit_edge_insertions ();
		break;
	      }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      /* Walk the block's insns backwards.  PREV_INSN is captured before
	 any splitting, because split_block may rechain insns.  */
      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
	         remain in the same block as the call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		while (split_at_insn != bb->end
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
	         should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == bb->end)
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      e = split_block (bb, split_at_insn);
	      if (e)
		blocks_split++;

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == bb->head)
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}
396
397/* Find unreachable blocks.  An unreachable block will have 0 in
398   the reachable bit in block->flags.  A non-zero value indicates the
399   block is reachable.  */
400
401void
402find_unreachable_blocks ()
403{
404  edge e;
405  int i, n;
406  basic_block *tos, *worklist;
407
408  n = n_basic_blocks;
409  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
410
411  /* Clear all the reachability flags.  */
412
413  for (i = 0; i < n; ++i)
414    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
415
416  /* Add our starting points to the worklist.  Almost always there will
417     be only one.  It isn't inconceivable that we might one day directly
418     support Fortran alternate entry points.  */
419
420  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
421    {
422      *tos++ = e->dest;
423
424      /* Mark the block reachable.  */
425      e->dest->flags |= BB_REACHABLE;
426    }
427
428  /* Iterate: find everything reachable from what we've already seen.  */
429
430  while (tos != worklist)
431    {
432      basic_block b = *--tos;
433
434      for (e = b->succ; e; e = e->succ_next)
435	if (!(e->dest->flags & BB_REACHABLE))
436	  {
437	    *tos++ = e->dest;
438	    e->dest->flags |= BB_REACHABLE;
439	  }
440    }
441
442  free (worklist);
443}
444
445/* Functions to access an edge list with a vector representation.
446   Enough data is kept such that given an index number, the
447   pred and succ that edge represents can be determined, or
448   given a pred and a succ, its index number can be returned.
449   This allows algorithms which consume a lot of memory to
450   represent the normally full matrix of edge (pred,succ) with a
451   single indexed vector,  edge (EDGE_INDEX (pred, succ)), with no
452   wasted space in the client code due to sparse flow graphs.  */
453
454/* This functions initializes the edge list. Basically the entire
455   flowgraph is processed, and all edges are assigned a number,
456   and the data structure is filled in.  */
457
458struct edge_list *
459create_edge_list ()
460{
461  struct edge_list *elist;
462  edge e;
463  int num_edges;
464  int x;
465  int block_count;
466
467  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */
468
469  num_edges = 0;
470
471  /* Determine the number of edges in the flow graph by counting successor
472     edges on each basic block.  */
473  for (x = 0; x < n_basic_blocks; x++)
474    {
475      basic_block bb = BASIC_BLOCK (x);
476
477      for (e = bb->succ; e; e = e->succ_next)
478	num_edges++;
479    }
480
481  /* Don't forget successors of the entry block.  */
482  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
483    num_edges++;
484
485  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
486  elist->num_blocks = block_count;
487  elist->num_edges = num_edges;
488  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
489
490  num_edges = 0;
491
492  /* Follow successors of the entry block, and register these edges.  */
493  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
494    elist->index_to_edge[num_edges++] = e;
495
496  for (x = 0; x < n_basic_blocks; x++)
497    {
498      basic_block bb = BASIC_BLOCK (x);
499
500      /* Follow all successors of blocks, and register these edges.  */
501      for (e = bb->succ; e; e = e->succ_next)
502	elist->index_to_edge[num_edges++] = e;
503    }
504
505  return elist;
506}
507
508/* This function free's memory associated with an edge list.  */
509
510void
511free_edge_list (elist)
512     struct edge_list *elist;
513{
514  if (elist)
515    {
516      free (elist->index_to_edge);
517      free (elist);
518    }
519}
520
521/* This function provides debug output showing an edge list.  */
522
523void
524print_edge_list (f, elist)
525     FILE *f;
526     struct edge_list *elist;
527{
528  int x;
529
530  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
531	   elist->num_blocks - 2, elist->num_edges);
532
533  for (x = 0; x < elist->num_edges; x++)
534    {
535      fprintf (f, " %-4d - edge(", x);
536      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
537	fprintf (f, "entry,");
538      else
539	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
540
541      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
542	fprintf (f, "exit)\n");
543      else
544	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
545    }
546}
547
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  Discrepancies are reported to the file F.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  /* Pass 1: every successor edge of every ordinary block must appear
     in the list, with matching pred/succ endpoints.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* Likewise for the entry block's successor edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  Pass 2 checks every
     (pred, succ) pair of ordinary blocks: the list has an index for
     the pair iff the CFG has the edge.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }

  /* Same check for (entry, succ) pairs.  */
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }

  /* And for (pred, exit) pairs.  */
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
700
701/* This routine will determine what, if any, edge there is between
702   a specified predecessor and successor.  */
703
704int
705find_edge_index (edge_list, pred, succ)
706     struct edge_list *edge_list;
707     basic_block pred, succ;
708{
709  int x;
710
711  for (x = 0; x < NUM_EDGES (edge_list); x++)
712    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
713	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
714      return x;
715
716  return (EDGE_INDEX_NO_EDGE);
717}
718
/* Dump the list of basic blocks in the bitmap NODES to FILE, prefixed
   by the string STR.  A null NODES bitmap prints nothing.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}
736
737/* Dump the list of edges in the array EDGE_LIST.  */
738
739void
740flow_edge_list_print (str, edge_list, num_edges, file)
741     const char *str;
742     const edge *edge_list;
743     int num_edges;
744     FILE *file;
745{
746  int i;
747
748  if (! edge_list)
749    return;
750
751  fprintf (file, "%s { ", str);
752  for (i = 0; i < num_edges; i++)
753    fprintf (file, "%d->%d ", edge_list[i]->src->index,
754	     edge_list[i]->dest->index);
755
756  fputs ("}\n", file);
757}
758
759
760/* This routine will remove any fake successor edges for a basic block.
761   When the edge is removed, it is also removed from whatever predecessor
762   list it is in.  */
763
764static void
765remove_fake_successors (bb)
766     basic_block bb;
767{
768  edge e;
769
770  for (e = bb->succ; e;)
771    {
772      edge tmp = e;
773
774      e = e->succ_next;
775      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
776	remove_edge (tmp);
777    }
778}
779
780/* This routine will remove all fake edges from the flow graph.  If
781   we remove all fake successors, it will automatically remove all
782   fake predecessors.  */
783
784void
785remove_fake_edges ()
786{
787  int x;
788
789  for (x = 0; x < n_basic_blocks; x++)
790    remove_fake_successors (BASIC_BLOCK (x));
791
792  /* We've handled all successors except the entry block's.  */
793  remove_fake_successors (ENTRY_BLOCK_PTR);
794}
795
796/* This function will add a fake edge between any block which has no
797   successors, and the exit block. Some data flow equations require these
798   edges to exist.  */
799
800void
801add_noreturn_fake_exit_edges ()
802{
803  int x;
804
805  for (x = 0; x < n_basic_blocks; x++)
806    if (BASIC_BLOCK (x)->succ == NULL)
807      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
808}
809
810/* This function adds a fake edge between any infinite loops to the
811   exit block.  Some optimizations require a path from each node to
812   the exit node.
813
814   See also Morgan, Figure 3.10, pp. 82-83.
815
816   The current implementation is ugly, not attempting to minimize the
817   number of inserted fake edges.  To reduce the number of fake edges
818   to insert, add fake edges from _innermost_ loops containing only
819   nodes not reachable from the exit block.  */
820
821void
822connect_infinite_loops_to_exit ()
823{
824  basic_block unvisited_block;
825  struct depth_first_search_dsS dfs_ds;
826
827  /* Perform depth-first search in the reverse graph to find nodes
828     reachable from the exit block.  */
829  flow_dfs_compute_reverse_init (&dfs_ds);
830  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
831
832  /* Repeatedly add fake edges, updating the unreachable nodes.  */
833  while (1)
834    {
835      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
836      if (!unvisited_block)
837	break;
838
839      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
840      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
841    }
842
843  flow_dfs_compute_reverse_finish (&dfs_ds);
844  return;
845}
846
/* Compute reverse top sort order.  RTS_ORDER receives the blocks in
   DFS postorder of the forward CFG: each block is recorded after all
   of its (non-back-edge) successors.  RTS_ORDER must have room for
   n_basic_blocks entries.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else
	    /* DEST is a leaf; it is finished immediately, so record
	       its postorder number now.  */
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  /* All successors of SRC explored: SRC is finished.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	   rts_order[postnum++] = src->index;

	  /* Advance to the next successor edge, or backtrack.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}
910
/* Compute the depth first search order and store in the array
  DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
  RC_ORDER is non-zero, return the reverse completion number for each
  node.  Returns the number of nodes visited.  A depth first search
  tries to get as far away from the starting point as quickly as
  possible.

  DFS_ORDER is filled in discovery (preorder) sequence; RC_ORDER is
  filled from the back in completion (postorder) sequence, yielding
  reverse postorder.  Both arrays, when supplied, must have room for
  n_basic_blocks entries.  Aborts if the CFG contains unreachable
  blocks.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Preorder: number DEST at the moment it is discovered.  */
	  if (dfs_order)
	    dfs_order[dfsnum] = dest->index;

	  dfsnum++;

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR
	      && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  /* Advance to the next successor edge, or backtrack.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
1002
/* A node of the depth-first spanning tree built by
   flow_preorder_transversal_compute, below.  */
struct dfst_node
{
    /* Number of children currently stored in NODE.  */
    unsigned nnodes;
    /* Array of pointers to this node's children.  */
    struct dfst_node **node;
    /* Parent of this node in the spanning tree.  */
    struct dfst_node *up;
};
1009
/* Compute a preorder transversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is compute by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.

   POT_ORDER must have room for n_basic_blocks entries.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
				       sizeof (struct dfst_node));

  /* Size each tree node's child array by its block's successor count.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	max_successors++;

      dfst[i].node
	= (max_successors
	   ? (struct dfst_node **) xcalloc (max_successors,
					    sizeof (struct dfst_node *))
	   : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  NNODES doubles as the walk
     cursor: children are consumed from the last-added (rightmost)
     back to the first.  Block index is recovered by pointer
     difference from the base of the DFST array.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
	{
	  node = node->node[--node->nnodes];
	  pot_order[i++] = node - dfst;
	}
      else
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
1129
1130/* Compute the depth first search order on the _reverse_ graph and
1131   store in the array DFS_ORDER, marking the nodes visited in VISITED.
1132   Returns the number of nodes visited.
1133
1134   The computation is split into three pieces:
1135
1136   flow_dfs_compute_reverse_init () creates the necessary data
1137   structures.
1138
1139   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1140   structures.  The block will start the search.
1141
1142   flow_dfs_compute_reverse_execute () continues (or starts) the
1143   search using the block on the top of the stack, stopping when the
1144   stack is empty.
1145
1146   flow_dfs_compute_reverse_finish () destroys the necessary data
1147   structures.
1148
1149   Thus, the user will probably call ..._init(), call ..._add_bb() to
1150   add a beginning basic block to the stack, call ..._execute(),
1151   possibly add another bb to the stack and again call ..._execute(),
1152   ..., and finally call _finish().  */
1153
1154/* Initialize the data structures used for depth-first search on the
1155   reverse graph.  DATA is the depth-first search context to be
1156   initialized; its stack starts out empty and no blocks are marked
1157   as visited.  */
1158
1159
1160static void
1161flow_dfs_compute_reverse_init (data)
1162     depth_first_search_ds data;
1163{
1164  /* Allocate stack for back-tracking up CFG.  */
1165  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1166					 * sizeof (basic_block));
1167  data->sp = 0;
1168
1169  /* Allocate bitmap to track nodes that have been visited.  */
1170  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
1171
1172  /* None of the nodes in the CFG have been visited yet.  */
1173  sbitmap_zero (data->visited_blocks);
1174
1175  return;
1176}
1177
1178/* Add the specified basic block to the top of the dfs data
1179   structures.  When the search continues, it will start at the
1180   block.  */
1181
1182static void
1183flow_dfs_compute_reverse_add_bb (data, bb)
1184     depth_first_search_ds data;
1185     basic_block bb;
1186{
1187  data->stack[data->sp++] = bb;
1188  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1189}
1190
1191/* Continue the depth-first search through the reverse graph starting with the
1192   block at the stack's top and ending when the stack is empty.  Visited nodes
1193   are marked.  Returns an unvisited basic block, or NULL if there is none
1194   available.  */
1195
1196static basic_block
1197flow_dfs_compute_reverse_execute (data)
1198     depth_first_search_ds data;
1199{
1200  basic_block bb;
1201  edge e;
1202  int i;
1203
1204  while (data->sp > 0)
1205    {
1206      bb = data->stack[--data->sp];
1207
1208      /* Perform depth-first search on adjacent vertices.  */
1209      for (e = bb->pred; e; e = e->pred_next)
1210	if (!TEST_BIT (data->visited_blocks,
1211		       e->src->index - (INVALID_BLOCK + 1)))
1212	  flow_dfs_compute_reverse_add_bb (data, e->src);
1213    }
1214
1215  /* Determine if there are unvisited basic blocks.  */
1216  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
1217    if (!TEST_BIT (data->visited_blocks, i))
1218      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
1219
1220  return NULL;
1221}
1222
1223/* Destroy the data structures needed for depth-first search on the
1224   reverse graph.  */
1225
1226static void
1227flow_dfs_compute_reverse_finish (data)
1228     depth_first_search_ds data;
1229{
1230  free (data->stack);
1231  sbitmap_free (data->visited_blocks);
1232}
1233