/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "tm_p.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack for backtracking during the algorithm.  */
  basic_block *stack;

  /* Number of basic blocks on the stack.  That is, positions 0, ...,
     sp-1 are occupied.  */
  unsigned int sp;

  /* Record of basic blocks already seen by depth-first search.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init (depth_first_search_ds);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
                                             basic_block);
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
static void remove_fake_successors (basic_block);
static bool need_fake_edge_p (rtx);
static bool flow_active_insn_p (rtx);

/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (rtx insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime aborts.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !flow_active_insn_p (insn));
}

/* Return true if we can reach TARGET from SRC by falling through.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2 = target == EXIT_BLOCK_PTR ? NULL : BB_HEAD (target);

  if (src->next_bb != target)
    return false;

  if (insn2 && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = xcalloc (last_basic_block, sizeof (int));
  post = xcalloc (last_basic_block, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;
          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          /* DEST was already visited; the edge is a back edge iff
             DEST is an ancestor of SRC in the search, i.e. DEST was
             entered no later than SRC and has not yet been assigned
             a postorder number.  */
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
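
/* A minimal usage sketch (illustrative; not part of the original file):
   after mark_dfs_back_edges returns, back edges carry EDGE_DFS_BACK.

     bool has_loop = mark_dfs_back_edges ();
     basic_block bb;
     edge e;

     FOR_EACH_BB (bb)
       for (e = bb->succ; e; e = e->succ_next)
         if (e->flags & EDGE_DFS_BACK)
           handle_back_edge (e);

   Here handle_back_edge is a hypothetical callback; FOR_EACH_BB and the
   succ_next iteration follow the conventions used throughout this
   file.  */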

/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        {
          e->flags &= ~EDGE_CAN_FALLTHRU;

          /* The FALLTHRU edge is also a CAN_FALLTHRU edge.  */
          if (e->flags & EDGE_FALLTHRU)
            e->flags |= EDGE_CAN_FALLTHRU;
        }

      /* If the BB ends with an invertible condjump, both of its edges
         are CAN_FALLTHRU edges.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
        continue;
      if (!any_condjump_p (BB_END (bb)))
        continue;
      if (!invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0))
        continue;
      /* The first invert_jump proved the condjump invertible (and
         inverted it); invert it back to restore the original sense.  */
      invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0);
      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (rtx insn)
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls, and for volatile inline assembly, in the bitmap
   of blocks specified by BLOCKS or in the whole CFG if BLOCKS is zero.
   Return the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == 0)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          for (e = bb->succ; e; e = e->succ_next)
            if (e->dest == EXIT_BLOCK_PTR)
              {
                insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
                commit_edge_insertions ();
                break;
              }
        }
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx libcall_end = NULL_RTX;
      rtx insn;
      rtx prev_insn;

      if (!bb)
        continue;

      if (blocks && !TEST_BIT (blocks, i))
        continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split libcalls.  */
              if (libcall_end)
                split_at_insn = libcall_end;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              else if (GET_CODE (insn) == CALL_INSN)
                while (split_at_insn != BB_END (bb)
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in the CFG already.  Calling make_edge in such a case
                 would cause us to mark that edge as fake and remove it
                 later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == BB_END (bb))
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              if (split_at_insn != BB_END (bb))
                {
                  e = split_block (bb, split_at_insn);
                  if (e)
                    blocks_split++;
                }

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          /* Watch out for REG_LIBCALL/REG_RETVAL notes so that we know
             whether we are currently in a libcall or not.  Remember that
             we are scanning backwards!  */
          if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
            libcall_end = insn;
          if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            libcall_end = NULL_RTX;

          if (insn == BB_HEAD (bb))
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
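
/* A minimal usage sketch (illustrative; not part of the original file):
   a pass that needs every block to have an explicit path to EXIT might
   process the whole CFG like so:

     int n_split = flow_call_edges_add (NULL);
     ... run the analysis that required the fake edges ...
     remove_fake_edges ();

   remove_fake_edges is defined later in this file.  */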

/* Find unreachable blocks.  An unreachable block will have its
   BB_REACHABLE bit clear in block->flags; a set bit indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  basic_block *tos, *worklist, bb;

  tos = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* Clear all the reachability flags.  */

  FOR_EACH_BB (bb)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}

/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which would otherwise consume a lot of
   memory representing the normally full matrix of edges (pred, succ)
   to instead use a single indexed vector, edge (EDGE_INDEX (pred,
   succ)), with no wasted space in the client code due to sparse
   flow graphs.  */

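/* A minimal usage sketch (illustrative; not part of the original file):

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, bb_from, bb_to);

     if (ix != EDGE_INDEX_NO_EDGE)
       mark_edge_interesting (INDEX_EDGE (elist, ix));

     free_edge_list (elist);

   bb_from, bb_to and mark_edge_interesting are hypothetical client
   names; EDGE_INDEX, INDEX_EDGE and EDGE_INDEX_NO_EDGE come from
   basic-block.h.  */
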
/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int block_count;
  basic_block bb;

  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }

  elist = xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    for (e = bb->succ; e; e = e->succ_next)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, p, s)
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   p->index, s->index);
        if (EDGE_INDEX (elist, p, s)
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}

/* This routine determines what, if any, edge exists between a specified
   predecessor and successor, returning its index in the edge list, or
   EDGE_INDEX_NO_EDGE if there is no such edge.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return EDGE_INDEX_NO_EDGE;
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (const char *str, const sbitmap nodes, FILE *file)
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (const char *str, const edge *edge_list, int num_edges, FILE *file)
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);

  fputs ("}\n", file);
}


/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (basic_block bb)
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    remove_fake_successors (bb);
}

/* This function will add a fake edge between any block which has no
   successors and the exit block.  Some data flow equations require
   these edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->succ == NULL)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}

/* This function adds a fake edge from each infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
}

/* Compute reverse topological sort order, i.e. a postorder numbering
   of the CFG, storing it in RTS_ORDER.  */

void
flow_reverse_top_sort_order_compute (int *rts_order)
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}
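
/* A minimal usage sketch (illustrative; not part of the original file):

     int *rts_order = xmalloc (n_basic_blocks * sizeof (int));

     flow_reverse_top_sort_order_compute (rts_order);
     ... visit blocks in the order rts_order[0 .. n_basic_blocks - 1] ...
     free (rts_order);

   The caller supplies the array, which must have room for one entry
   per basic block.  */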

/* Compute the depth first search order and store it in the array
   DFS_ORDER if nonzero.  If RC_ORDER is nonzero, store the reverse
   completion number of each node in it.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the
   starting point as quickly as possible.  */

int
flow_depth_first_order_compute (int *dfs_order, int *rc_order)
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum] = dest->index;

          dfsnum++;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR
              && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
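
/* A minimal usage sketch (illustrative; not part of the original file):
   computing a reverse completion numbering, the visit order many
   iterative dataflow problems want:

     int *rc_order = xmalloc (n_basic_blocks * sizeof (int));
     int n = flow_depth_first_order_compute (NULL, rc_order);

     ... iterate over rc_order[0 .. n - 1] ...
     free (rc_order);

   Passing NULL for DFS_ORDER is supported; only the orders actually
   requested are stored.  Note that the function aborts if any block
   is unreachable, so callers may need find_unreachable_blocks and
   cleanup first.  */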

struct dfst_node
{
  unsigned nnodes;
  struct dfst_node **node;
  struct dfst_node *up;
};

/* Compute a preorder traversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (int *pot_order)
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;
  basic_block bb;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = xcalloc (last_basic_block, sizeof (struct dfst_node));

  FOR_EACH_BB (bb)
    {
      max_successors = 0;
      for (e = bb->succ; e; e = e->succ_next)
        max_successors++;

      dfst[bb->index].node
        = (max_successors
           ? xcalloc (max_successors, sizeof (struct dfst_node *)) : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          /* Add the destination to the preorder tree.  */
          if (src != ENTRY_BLOCK_PTR)
            {
              dfst[src->index].node[dfst[src->index].nnodes++]
                = &dfst[dest->index];
              dfst[dest->index].up = &dfst[src->index];
            }

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
        }

      else if (e->succ_next)
        stack[sp - 1] = e->succ_next;
      else
        sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder traversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
        {
          node = node->node[--node->nnodes];
          pot_order[i++] = node - dfst;
        }
      else
        node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < last_basic_block; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}

/* Perform a depth-first search on the _reverse_ graph, marking the
   nodes visited along the way.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  A usage sketch follows this
   comment.  */
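
/* A minimal usage sketch (illustrative; not part of the original file),
   following the calling convention described above.  It finds blocks
   from which EXIT is unreachable, essentially what
   connect_infinite_loops_to_exit does earlier in this file:

     struct depth_first_search_dsS dfs_ds;
     basic_block bb;

     flow_dfs_compute_reverse_init (&dfs_ds);
     flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
     bb = flow_dfs_compute_reverse_execute (&dfs_ds);
     while (bb)
       {
         ... BB cannot reach EXIT; patch the graph as needed ...
         flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
         bb = flow_dfs_compute_reverse_execute (&dfs_ds);
       }
     flow_dfs_compute_reverse_finish (&dfs_ds);  */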

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to set up;
   its stack starts out empty, and no blocks are marked visited.  */

static void
flow_dfs_compute_reverse_init (depth_first_search_ds data)
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                         * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (depth_first_search_ds data)
{
  basic_block bb;
  edge e;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      for (e = bb->pred; e; e = e->pred_next)
        if (!TEST_BIT (data->visited_blocks,
                       e->src->index - (INVALID_BLOCK + 1)))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
    if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
      return bb;

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (depth_first_search_ds data)
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}

/* Perform a DFS from BB over vertices satisfying PREDICATE; if REVERSE,
   go against the direction of the edges.  DATA is passed through to
   PREDICATE.  Return the number of blocks found, storing them in RSLT,
   which can hold at most RSLT_MAX items.  */

int
dfs_enumerate_from (basic_block bb, int reverse,
                    bool (*predicate) (basic_block, void *),
                    basic_block *rslt, int rslt_max, void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;

  st = xcalloc (rslt_max, sizeof (basic_block));
  rslt[tv++] = st[sp++] = bb;
  bb->flags |= BB_VISITED;
  while (sp)
    {
      edge e;
      lbb = st[--sp];
      if (reverse)
        {
          for (e = lbb->pred; e; e = e->pred_next)
            if (!(e->src->flags & BB_VISITED) && predicate (e->src, data))
              {
                if (tv == rslt_max)
                  abort ();
                rslt[tv++] = st[sp++] = e->src;
                e->src->flags |= BB_VISITED;
              }
        }
      else
        {
          for (e = lbb->succ; e; e = e->succ_next)
            if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data))
              {
                if (tv == rslt_max)
                  abort ();
                rslt[tv++] = st[sp++] = e->dest;
                e->dest->flags |= BB_VISITED;
              }
        }
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    rslt[sp]->flags &= ~BB_VISITED;
  return tv;
}
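
/* A minimal usage sketch (illustrative; not part of the original file).
   With a hypothetical predicate

     static bool
     bb_is_hot (basic_block bb, void *data ATTRIBUTE_UNUSED)
     {
       return maybe_hot_bb_p (bb);
     }

   a caller could enumerate, in the forward direction, the hot blocks
   reachable from BB through hot blocks:

     basic_block rslt[100];
     int n = dfs_enumerate_from (bb, 0, bb_is_hot, rslt, 100, NULL);

   Only neighbors satisfying PREDICATE are entered (the start block is
   always included), and the walk aborts if more than RSLT_MAX blocks
   are found.  */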