/* cfganal.c revision 96263 */
1/* Control flow graph analysis code for GNU compiler.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22/* This file contains various simple utilities to analyze the CFG.  */
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "hard-reg-set.h"
27#include "basic-block.h"
28#include "insn-config.h"
29#include "recog.h"
30#include "toplev.h"
31#include "obstack.h"
32#include "tm_p.h"
33
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack of basic blocks for backtracking during the algorithm.  */
  basic_block *stack;

  /* Number of entries in use on the stack.  That is, positions 0, ..., sp-1
     hold blocks.  */
  unsigned int sp;

  /* Record of basic blocks already seen by the depth-first search.  */
  sbitmap visited_blocks;
};
/* Clients manipulate the search state through a pointer.  */
typedef struct depth_first_search_dsS *depth_first_search_ds;
47
/* Forward declarations for the file-static helper functions below.  */
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors	PARAMS ((basic_block));
static bool need_fake_edge_p		PARAMS ((rtx));
static bool keep_with_call_p		PARAMS ((rtx));
59
60/* Return true if the block has no effect and only forwards control flow to
61   its single destination.  */
62
63bool
64forwarder_block_p (bb)
65     basic_block bb;
66{
67  rtx insn;
68
69  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
70      || !bb->succ || bb->succ->succ_next)
71    return false;
72
73  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
74    if (INSN_P (insn) && active_insn_p (insn))
75      return false;
76
77  return (!INSN_P (insn)
78	  || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
79	  || !active_insn_p (insn));
80}
81
82/* Return nonzero if we can reach target from src by falling through.  */
83
84bool
85can_fallthru (src, target)
86     basic_block src, target;
87{
88  rtx insn = src->end;
89  rtx insn2 = target->head;
90
91  if (src->index + 1 == target->index && !active_insn_p (insn2))
92    insn2 = next_active_insn (insn2);
93
94  /* ??? Later we may add code to move jump tables offline.  */
95  return next_active_insn (insn) == insn2;
96}
97
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;			/* Edges on the current DFS path.  */
  int *pre;			/* Preorder (discovery) number per block.  */
  int *post;			/* Postorder (completion) number per block.  */
  int sp;			/* Stack pointer: entries 0..sp-1 in use.  */
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  xcalloc zeroes
     them; a zero postorder number means "not yet completed".  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      /* Clear any stale back-edge marking; it is recomputed below.  */
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* A back edge goes to a block that is still on the DFS path:
	     discovered no later than SRC (pre[src] >= pre[dest]) and
	     not yet completed (post[dest] == 0).  */
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  /* If this was SRC's last successor, SRC is complete.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
189
190/* Return true if we need to add fake edge to exit.
191   Helper function for the flow_call_edges_add.  */
192
193static bool
194need_fake_edge_p (insn)
195     rtx insn;
196{
197  if (!INSN_P (insn))
198    return false;
199
200  if ((GET_CODE (insn) == CALL_INSN
201       && !SIBLING_CALL_P (insn)
202       && !find_reg_note (insn, REG_NORETURN, NULL)
203       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
204       && !CONST_OR_PURE_CALL_P (insn)))
205    return true;
206
207  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
208	   && MEM_VOLATILE_P (PATTERN (insn)))
209	  || (GET_CODE (PATTERN (insn)) == PARALLEL
210	      && asm_noperands (insn) != -1
211	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
212	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
213}
214
215/* Return true if INSN should be kept in the same block as a preceding call.
216   This is done for a single-set whose destination is a fixed register or
217   whose source is the function return value.  This is a helper function for
218   flow_call_edges_add.  */
219
220static bool
221keep_with_call_p (insn)
222     rtx insn;
223{
224  rtx set;
225
226  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
227    {
228      if (GET_CODE (SET_DEST (set)) == REG
229	  && fixed_regs[REGNO (SET_DEST (set))]
230	  && general_operand (SET_SRC (set), VOIDmode))
231	return true;
232      if (GET_CODE (SET_SRC (set)) == REG
233	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
234	  && GET_CODE (SET_DEST (set)) == REG
235	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
236	return true;
237    }
238  return false;
239}
240
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;		/* Count of blocks collected in BBS.  */
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  /* Collect the candidate blocks: all blocks when BLOCKS is null,
     otherwise only the ones whose bit is set.  */
  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
	bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }
  else
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
			       {
				 bbs[bb_num++] = BASIC_BLOCK (i);
				 if (i == n_basic_blocks - 1)
				   check_last_block = true;
			       });

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = BASIC_BLOCK (n_basic_blocks - 1);
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Find the fallthru edge to EXIT; inserting the dummy insn on
	     it forces a block split, so the call no longer ends the
	     final block.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == EXIT_BLOCK_PTR)
	      break;

	  insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
	  commit_edge_insertions ();
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      /* Walk the block's insns backwards.  PREV_INSN is captured
	 before any split so the walk is not disturbed by it.  */
      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
	         remain in the same block as the call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		while (split_at_insn != bb->end
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
	         should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == bb->end)
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      e = split_block (bb, split_at_insn);
	      if (e)
		blocks_split++;

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == bb->head)
	    break;
	}
    }

  /* Splitting may have produced an inconsistent CFG; verify if
     anything changed.  */
  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}
371
372/* Find unreachable blocks.  An unreachable block will have 0 in
373   the reachable bit in block->flags.  A non-zero value indicates the
374   block is reachable.  */
375
376void
377find_unreachable_blocks ()
378{
379  edge e;
380  int i, n;
381  basic_block *tos, *worklist;
382
383  n = n_basic_blocks;
384  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
385
386  /* Clear all the reachability flags.  */
387
388  for (i = 0; i < n; ++i)
389    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
390
391  /* Add our starting points to the worklist.  Almost always there will
392     be only one.  It isn't inconceivable that we might one day directly
393     support Fortran alternate entry points.  */
394
395  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
396    {
397      *tos++ = e->dest;
398
399      /* Mark the block reachable.  */
400      e->dest->flags |= BB_REACHABLE;
401    }
402
403  /* Iterate: find everything reachable from what we've already seen.  */
404
405  while (tos != worklist)
406    {
407      basic_block b = *--tos;
408
409      for (e = b->succ; e; e = e->succ_next)
410	if (!(e->dest->flags & BB_REACHABLE))
411	  {
412	    *tos++ = e->dest;
413	    e->dest->flags |= BB_REACHABLE;
414	  }
415    }
416
417  free (worklist);
418}
419
420/* Functions to access an edge list with a vector representation.
421   Enough data is kept such that given an index number, the
422   pred and succ that edge represents can be determined, or
423   given a pred and a succ, its index number can be returned.
424   This allows algorithms which consume a lot of memory to
425   represent the normally full matrix of edge (pred,succ) with a
426   single indexed vector,  edge (EDGE_INDEX (pred, succ)), with no
427   wasted space in the client code due to sparse flow graphs.  */
428
429/* This functions initializes the edge list. Basically the entire
430   flowgraph is processed, and all edges are assigned a number,
431   and the data structure is filled in.  */
432
433struct edge_list *
434create_edge_list ()
435{
436  struct edge_list *elist;
437  edge e;
438  int num_edges;
439  int x;
440  int block_count;
441
442  block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */
443
444  num_edges = 0;
445
446  /* Determine the number of edges in the flow graph by counting successor
447     edges on each basic block.  */
448  for (x = 0; x < n_basic_blocks; x++)
449    {
450      basic_block bb = BASIC_BLOCK (x);
451
452      for (e = bb->succ; e; e = e->succ_next)
453	num_edges++;
454    }
455
456  /* Don't forget successors of the entry block.  */
457  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
458    num_edges++;
459
460  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
461  elist->num_blocks = block_count;
462  elist->num_edges = num_edges;
463  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
464
465  num_edges = 0;
466
467  /* Follow successors of the entry block, and register these edges.  */
468  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
469    elist->index_to_edge[num_edges++] = e;
470
471  for (x = 0; x < n_basic_blocks; x++)
472    {
473      basic_block bb = BASIC_BLOCK (x);
474
475      /* Follow all successors of blocks, and register these edges.  */
476      for (e = bb->succ; e; e = e->succ_next)
477	elist->index_to_edge[num_edges++] = e;
478    }
479
480  return elist;
481}
482
483/* This function free's memory associated with an edge list.  */
484
485void
486free_edge_list (elist)
487     struct edge_list *elist;
488{
489  if (elist)
490    {
491      free (elist->index_to_edge);
492      free (elist);
493    }
494}
495
496/* This function provides debug output showing an edge list.  */
497
498void
499print_edge_list (f, elist)
500     FILE *f;
501     struct edge_list *elist;
502{
503  int x;
504
505  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
506	   elist->num_blocks - 2, elist->num_edges);
507
508  for (x = 0; x < elist->num_edges; x++)
509    {
510      fprintf (f, " %-4d - edge(", x);
511      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
512	fprintf (f, "entry,");
513      else
514	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
515
516      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
517	fprintf (f, "exit)\n");
518      else
519	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
520    }
521}
522
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  /* First direction: every successor edge of every ordinary block must
     have an index, and that index must map back to the same pred and
     succ.  Problems are reported to F, not treated as fatal.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* The loop above does not cover the entry block's successors; check
     them the same way.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  For every (pred, succ)
     pair of ordinary blocks, the list must have an index exactly when
     the CFG has the edge.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);
	int found_edge = 0;

	/* Look for the edge in both the succ list of P and the pred
	   list of S; either appearance counts.  */
	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }

  /* Repeat the spurious-edge check for edges leaving the entry block.  */
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }

  /* And for edges entering the exit block.  */
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }

      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
675
676/* This routine will determine what, if any, edge there is between
677   a specified predecessor and successor.  */
678
679int
680find_edge_index (edge_list, pred, succ)
681     struct edge_list *edge_list;
682     basic_block pred, succ;
683{
684  int x;
685
686  for (x = 0; x < NUM_EDGES (edge_list); x++)
687    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
688	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
689      return x;
690
691  return (EDGE_INDEX_NO_EDGE);
692}
693
694/* Dump the list of basic blocks in the bitmap NODES.  */
695
696void
697flow_nodes_print (str, nodes, file)
698     const char *str;
699     const sbitmap nodes;
700     FILE *file;
701{
702  int node;
703
704  if (! nodes)
705    return;
706
707  fprintf (file, "%s { ", str);
708  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
709  fputs ("}\n", file);
710}
711
712/* Dump the list of edges in the array EDGE_LIST.  */
713
714void
715flow_edge_list_print (str, edge_list, num_edges, file)
716     const char *str;
717     const edge *edge_list;
718     int num_edges;
719     FILE *file;
720{
721  int i;
722
723  if (! edge_list)
724    return;
725
726  fprintf (file, "%s { ", str);
727  for (i = 0; i < num_edges; i++)
728    fprintf (file, "%d->%d ", edge_list[i]->src->index,
729	     edge_list[i]->dest->index);
730
731  fputs ("}\n", file);
732}
733
734
735/* This routine will remove any fake successor edges for a basic block.
736   When the edge is removed, it is also removed from whatever predecessor
737   list it is in.  */
738
739static void
740remove_fake_successors (bb)
741     basic_block bb;
742{
743  edge e;
744
745  for (e = bb->succ; e;)
746    {
747      edge tmp = e;
748
749      e = e->succ_next;
750      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
751	remove_edge (tmp);
752    }
753}
754
755/* This routine will remove all fake edges from the flow graph.  If
756   we remove all fake successors, it will automatically remove all
757   fake predecessors.  */
758
759void
760remove_fake_edges ()
761{
762  int x;
763
764  for (x = 0; x < n_basic_blocks; x++)
765    remove_fake_successors (BASIC_BLOCK (x));
766
767  /* We've handled all successors except the entry block's.  */
768  remove_fake_successors (ENTRY_BLOCK_PTR);
769}
770
771/* This function will add a fake edge between any block which has no
772   successors, and the exit block. Some data flow equations require these
773   edges to exist.  */
774
775void
776add_noreturn_fake_exit_edges ()
777{
778  int x;
779
780  for (x = 0; x < n_basic_blocks; x++)
781    if (BASIC_BLOCK (x)->succ == NULL)
782      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
783}
784
785/* This function adds a fake edge between any infinite loops to the
786   exit block.  Some optimizations require a path from each node to
787   the exit node.
788
789   See also Morgan, Figure 3.10, pp. 82-83.
790
791   The current implementation is ugly, not attempting to minimize the
792   number of inserted fake edges.  To reduce the number of fake edges
793   to insert, add fake edges from _innermost_ loops containing only
794   nodes not reachable from the exit block.  */
795
796void
797connect_infinite_loops_to_exit ()
798{
799  basic_block unvisited_block;
800  struct depth_first_search_dsS dfs_ds;
801
802  /* Perform depth-first search in the reverse graph to find nodes
803     reachable from the exit block.  */
804  flow_dfs_compute_reverse_init (&dfs_ds);
805  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
806
807  /* Repeatedly add fake edges, updating the unreachable nodes.  */
808  while (1)
809    {
810      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
811      if (!unvisited_block)
812	break;
813
814      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
815      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
816    }
817
818  flow_dfs_compute_reverse_finish (&dfs_ds);
819  return;
820}
821
/* Compute reverse top sort order.  Stores a DFS postorder of the
   block indices into RTS_ORDER, which the caller must have sized to
   hold one entry per basic block.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;			/* Edges on the current DFS path.  */
  int sp;			/* Stack pointer: entries 0..sp-1 in use.  */
  int postnum = 0;		/* Next free slot in RTS_ORDER.  */
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else
	    /* A leaf: its postorder number is assigned immediately.  */
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  /* If this was SRC's last successor, SRC is now complete.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	   rts_order[postnum++] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}
885
/* Compute the depth first search order and store in the array
  DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
  RC_ORDER is non-zero, return the reverse completion number for each
  node.  Returns the number of nodes visited.  A depth first search
  tries to get as far away from the starting point as quickly as
  possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;			/* Edges on the current DFS path.  */
  int sp;			/* Stack pointer: entries 0..sp-1 in use.  */
  int dfsnum = 0;		/* Next preorder (discovery) slot.  */
  int rcnum = n_basic_blocks - 1;	/* RC_ORDER fills from the end.  */
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum] = dest->index;

	  dfsnum++;

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR
	      && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
977
/* Node of the depth-first spanning tree built by
   flow_preorder_transversal_compute.  One node exists per basic
   block, in a parallel array.  */

struct dfst_node
{
    /* Number of children currently recorded in NODE.  */
    unsigned nnodes;

    /* Array of pointers to child nodes (sized to the block's
       successor count; may be null when there are none).  */
    struct dfst_node **node;

    /* Parent node in the spanning tree (null until linked).  */
    struct dfst_node *up;
};
984
/* Compute a preorder transversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is compute by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;			/* Edges on the current DFS path.  */
  int i;
  int max_successors;
  int sp;			/* Stack pointer: entries 0..sp-1 in use.  */
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;	/* Spanning tree, one node per block.  */

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
				       sizeof (struct dfst_node));

  /* Size each node's child array by its block's successor count,
     the maximum number of tree children it can acquire.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	max_successors++;

      dfst[i].node
	= (max_successors
	   ? (struct dfst_node **) xcalloc (max_successors,
					    sizeof (struct dfst_node *))
	   : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  Note this walk is
     destructive: it consumes each node's child count.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
	{
	  /* Descend into the rightmost remaining child.  */
	  node = node->node[--node->nnodes];
	  pot_order[i++] = node - dfst;
	}
      else
	/* No children left; climb back to the parent.  */
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
1104
/* Perform a depth-first search on the _reverse_ CFG (following edges
   from destination to source), starting from one or more blocks
   supplied by the caller, and record which blocks the search reaches.
1108
1109   The computation is split into three pieces:
1110
1111   flow_dfs_compute_reverse_init () creates the necessary data
1112   structures.
1113
1114   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1115   structures.  The block will start the search.
1116
1117   flow_dfs_compute_reverse_execute () continues (or starts) the
1118   search using the block on the top of the stack, stopping when the
1119   stack is empty.
1120
1121   flow_dfs_compute_reverse_finish () destroys the necessary data
1122   structures.
1123
1124   Thus, the user will probably call ..._init(), call ..._add_bb() to
1125   add a beginning basic block to the stack, call ..._execute(),
1126   possibly add another bb to the stack and again call ..._execute(),
1127   ..., and finally call _finish().  */
1128
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to set up:
   the backtracking stack starts out empty and no blocks are marked
   visited.  Use flow_dfs_compute_reverse_add_bb to seed the search.  */
1134
1135static void
1136flow_dfs_compute_reverse_init (data)
1137     depth_first_search_ds data;
1138{
1139  /* Allocate stack for back-tracking up CFG.  */
1140  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1141					 * sizeof (basic_block));
1142  data->sp = 0;
1143
1144  /* Allocate bitmap to track nodes that have been visited.  */
1145  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
1146
1147  /* None of the nodes in the CFG have been visited yet.  */
1148  sbitmap_zero (data->visited_blocks);
1149
1150  return;
1151}
1152
1153/* Add the specified basic block to the top of the dfs data
1154   structures.  When the search continues, it will start at the
1155   block.  */
1156
1157static void
1158flow_dfs_compute_reverse_add_bb (data, bb)
1159     depth_first_search_ds data;
1160     basic_block bb;
1161{
1162  data->stack[data->sp++] = bb;
1163  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1164}
1165
1166/* Continue the depth-first search through the reverse graph starting with the
1167   block at the stack's top and ending when the stack is empty.  Visited nodes
1168   are marked.  Returns an unvisited basic block, or NULL if there is none
1169   available.  */
1170
1171static basic_block
1172flow_dfs_compute_reverse_execute (data)
1173     depth_first_search_ds data;
1174{
1175  basic_block bb;
1176  edge e;
1177  int i;
1178
1179  while (data->sp > 0)
1180    {
1181      bb = data->stack[--data->sp];
1182
1183      /* Perform depth-first search on adjacent vertices.  */
1184      for (e = bb->pred; e; e = e->pred_next)
1185	if (!TEST_BIT (data->visited_blocks,
1186		       e->src->index - (INVALID_BLOCK + 1)))
1187	  flow_dfs_compute_reverse_add_bb (data, e->src);
1188    }
1189
1190  /* Determine if there are unvisited basic blocks.  */
1191  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
1192    if (!TEST_BIT (data->visited_blocks, i))
1193      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
1194
1195  return NULL;
1196}
1197
1198/* Destroy the data structures needed for depth-first search on the
1199   reverse graph.  */
1200
1201static void
1202flow_dfs_compute_reverse_finish (data)
1203     depth_first_search_ds data;
1204{
1205  free (data->stack);
1206  sbitmap_free (data->visited_blocks);
1207}
1208