/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "tm_p.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of basic blocks on the stack.  That is, positions 0, ...,
     sp-1 hold blocks.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init (depth_first_search_ds);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
					     basic_block);
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
static void remove_fake_successors (basic_block);
static bool need_fake_edge_p (rtx);
static bool flow_active_insn_p (rtx);

/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (rtx insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime aborts.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
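
/* For example, a block containing nothing but a code label and a
   simple unconditional jump to its one successor is a forwarder
   block.  */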

/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2 = target == EXIT_BLOCK_PTR ? NULL : BB_HEAD (target);

  if (src->next_bb != target)
    return 0;

  if (insn2 && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = xcalloc (last_basic_block, sizeof (int));
  post = xcalloc (last_basic_block, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
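	  /* DEST has already been visited.  The edge SRC->DEST is a
	     back edge iff DEST is still "open", i.e. it was preordered
	     no later than SRC and has not yet been postordered.  */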
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
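
/* As a worked example, consider the CFG

     ENTRY -> A -> B -> C -> EXIT
              ^         |
              +---------+

   The search visits A, B and C in that order, so pre[A] < pre[B]
   < pre[C].  When the edge C->A is examined, A has been visited,
   pre[C] >= pre[A] holds, and post[A] is still unassigned, so C->A
   is the only edge marked EDGE_DFS_BACK.  */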

/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  e->flags &= ~EDGE_CAN_FALLTHRU;

	  /* A FALLTHRU edge is also a CAN_FALLTHRU edge.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags |= EDGE_CAN_FALLTHRU;
	}

      /* If the BB ends with an invertible condjump, all (2) edges are
	 CAN_FALLTHRU edges.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
	continue;
      if (!any_condjump_p (BB_END (bb)))
	continue;
      if (!invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0))
	continue;
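      /* The inversion above succeeded, so the condjump is invertible;
	 invert it back to restore the original sense of the branch.  */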
      invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0);
      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (rtx insn)
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant, non-noreturn
   calls and any volatile inline assembly in the bitmap of blocks specified
   by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == 0)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == EXIT_BLOCK_PTR)
	      {
		insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
		commit_edge_insertions ();
		break;
	      }
	}
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx libcall_end = NULL_RTX;
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split libcalls.  */
	      if (libcall_end)
		split_at_insn = libcall_end;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      else if (GET_CODE (insn) == CALL_INSN)
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in the CFG already.  Calling make_edge in such a case
		 would cause us to mark that edge as fake and remove it
		 later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  /* Watch out for REG_LIBCALL/REG_RETVAL notes so that we know
	     whether we are currently in a libcall or not.  Remember that
	     we are scanning backwards!  */
	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    libcall_end = insn;
	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    libcall_end = NULL_RTX;

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
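
/* A hedged usage sketch: a pass that wants every block to have a path
   to EXIT (the profiler, say) might instrument the whole CFG and clean
   up afterwards along these lines, with the pass-specific work elided:

       int n_split = flow_call_edges_add (NULL);
       ... build the spanning tree, insert counters ...
       remove_fake_edges ();

   Passing an sbitmap instead of NULL restricts the work to the blocks
   whose bits are set.  */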

/* Find unreachable blocks.  An unreachable block will have the
   BB_REACHABLE bit in block->flags cleared; a set bit indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  basic_block *tos, *worklist, bb;

  tos = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* Clear all the reachability flags.  */

  FOR_EACH_BB (bb)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
	if (!(e->dest->flags & BB_REACHABLE))
	  {
	    *tos++ = e->dest;
	    e->dest->flags |= BB_REACHABLE;
	  }
    }

  free (worklist);
}
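
/* A minimal sketch of a typical consumer of the BB_REACHABLE flag; this
   is only the general shape of such cleanup code, not a copy of any
   particular caller:

       find_unreachable_blocks ();
       FOR_EACH_BB (bb)
	 if (!(bb->flags & BB_REACHABLE))
	   ... delete or otherwise handle the dead block ...  */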

/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int block_count;
  basic_block bb;

  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
	num_edges++;
    }

  elist = xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    for (e = bb->succ; e; e = e->succ_next)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}
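
/* A hypothetical round trip through the edge list machinery, assuming
   BB1 and BB2 are basic blocks in the current function:

       struct edge_list *elist = create_edge_list ();
       int ix = EDGE_INDEX (elist, bb1, bb2);

       if (ix != EDGE_INDEX_NO_EDGE)
	 ... use INDEX_EDGE (elist, ix) ...
       free_edge_list (elist);  */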

/* This function provides debug output showing an edge list.  */

void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (const char *str, const sbitmap nodes, FILE *file)
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (const char *str, const edge *edge_list, int num_edges, FILE *file)
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
	     edge_list[i]->dest->index);

  fputs ("}\n", file);
}


/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (basic_block bb)
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    remove_fake_successors (bb);
}

/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->succ == NULL)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}

/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
	break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}

/* Compute the reverse topological sort order.  */

void
flow_reverse_top_sort_order_compute (int *rts_order)
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    rts_order[postnum++] = src->index;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}

/* Compute the depth first search order and store it in the array
   DFS_ORDER if nonzero.  If RC_ORDER is nonzero, store the reverse
   completion number of each node there as well.  Returns the number
   of nodes visited.  A depth first search tries to get as far away
   from the starting point as quickly as possible.  */

int
flow_depth_first_order_compute (int *dfs_order, int *rc_order)
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum] = dest->index;

	  dfsnum++;

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR
	      && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
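
/* Callers allocate the order arrays themselves; a minimal sketch of
   the usual idiom:

       int *rc_order = xmalloc (n_basic_blocks * sizeof (int));
       flow_depth_first_order_compute (NULL, rc_order);
       ... rc_order[0] through rc_order[n_basic_blocks - 1] now hold
	   the block indices in reverse completion order ...
       free (rc_order);  */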

struct dfst_node
{
    unsigned nnodes;
    struct dfst_node **node;
    struct dfst_node *up;
};

/* Compute a preorder traversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (int *pot_order)
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;
  basic_block bb;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = xcalloc (last_basic_block, sizeof (struct dfst_node));

  FOR_EACH_BB (bb)
    {
      max_successors = 0;
      for (e = bb->succ; e; e = e->succ_next)
	max_successors++;

      dfst[bb->index].node
	= (max_successors
	   ? xcalloc (max_successors, sizeof (struct dfst_node *)) : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder traversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
	{
	  node = node->node[--node->nnodes];
	  pot_order[i++] = node - dfst;
	}
      else
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < last_basic_block; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}

/* Perform a depth-first search on the _reverse_ graph, marking the
   blocks visited as the search proceeds.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */

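/* connect_infinite_loops_to_exit above follows this protocol exactly:
   one ..._init (), an initial ..._add_bb () of EXIT_BLOCK_PTR, a loop
   of alternating ..._execute () and ..._add_bb () calls, and a final
   ..._finish ().  */
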
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to set up;
   it starts with an empty stack and no blocks visited.  */

static void
flow_dfs_compute_reverse_init (depth_first_search_ds data)
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
			 * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (depth_first_search_ds data)
{
  basic_block bb;
  edge e;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      for (e = bb->pred; e; e = e->pred_next)
	if (!TEST_BIT (data->visited_blocks,
		       e->src->index - (INVALID_BLOCK + 1)))
	  flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
    if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
      return bb;

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (depth_first_search_ds data)
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}

/* Perform a depth-first search starting at BB over the vertices that
   satisfy PREDICATE, going against the direction of the edges if
   REVERSE is nonzero.  DATA is passed through to PREDICATE.  Return
   the number of blocks found and their list in RSLT; RSLT can contain
   at most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (basic_block, void *),
		    basic_block *rslt, int rslt_max, void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;

  st = xcalloc (rslt_max, sizeof (basic_block));
  rslt[tv++] = st[sp++] = bb;
  bb->flags |= BB_VISITED;
  while (sp)
    {
      edge e;
      lbb = st[--sp];
      if (reverse)
	{
	  for (e = lbb->pred; e; e = e->pred_next)
	    if (!(e->src->flags & BB_VISITED) && predicate (e->src, data))
	      {
		if (tv == rslt_max)
		  abort ();
		rslt[tv++] = st[sp++] = e->src;
		e->src->flags |= BB_VISITED;
	      }
	}
      else
	{
	  for (e = lbb->succ; e; e = e->succ_next)
	    if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data))
	      {
		if (tv == rslt_max)
		  abort ();
		rslt[tv++] = st[sp++] = e->dest;
		e->dest->flags |= BB_VISITED;
	      }
	}
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    rslt[sp]->flags &= ~BB_VISITED;
  return tv;
}
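
/* A hypothetical call collecting at most 64 blocks reachable from BB,
   where my_pred stands for an assumed caller-supplied predicate of
   type bool (*) (basic_block, void *), not a function defined
   elsewhere in GCC:

       basic_block found[64];
       int n = dfs_enumerate_from (bb, 0, my_pred, found, 64, NULL);

   Passing 1 for REVERSE walks predecessor edges instead, collecting
   the blocks from which BB can be reached.  */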