1/* Generic SSA value propagation engine.
2   Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3   Contributed by Diego Novillo <dnovillo@redhat.com>
4
5   This file is part of GCC.
6
7   GCC is free software; you can redistribute it and/or modify it
8   under the terms of the GNU General Public License as published by the
9   Free Software Foundation; either version 2, or (at your option) any
10   later version.
11
12   GCC is distributed in the hope that it will be useful, but WITHOUT
13   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15   for more details.
16
17   You should have received a copy of the GNU General Public License
18   along with GCC; see the file COPYING.  If not, write to the Free
19   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20   02110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "flags.h"
28#include "rtl.h"
29#include "tm_p.h"
30#include "ggc.h"
31#include "basic-block.h"
32#include "output.h"
33#include "expr.h"
34#include "function.h"
35#include "diagnostic.h"
36#include "timevar.h"
37#include "tree-dump.h"
38#include "tree-flow.h"
39#include "tree-pass.h"
40#include "tree-ssa-propagate.h"
41#include "langhooks.h"
42#include "varray.h"
43#include "vec.h"
44
45/* This file implements a generic value propagation engine based on
46   the same propagation used by the SSA-CCP algorithm [1].
47
48   Propagation is performed by simulating the execution of every
49   statement that produces the value being propagated.  Simulation
50   proceeds as follows:
51
52   1- Initially, all edges of the CFG are marked not executable and
53      the CFG worklist is seeded with all the statements in the entry
54      basic block (block 0).
55
56   2- Every statement S is simulated with a call to the call-back
57      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
58      results:
59
60      	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
61	    interest and does not affect any of the work lists.
62
63	SSA_PROP_VARYING: The value produced by S cannot be determined
64	    at compile time.  Further simulation of S is not required.
65	    If S is a conditional jump, all the outgoing edges for the
66	    block are considered executable and added to the work
67	    list.
68
69	SSA_PROP_INTERESTING: S produces a value that can be computed
70	    at compile time.  Its result can be propagated into the
71	    statements that feed from S.  Furthermore, if S is a
72	    conditional jump, only the edge known to be taken is added
73	    to the work list.  Edges that are known not to execute are
74	    never simulated.
75
76   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
77      return value from SSA_PROP_VISIT_PHI has the same semantics as
78      described in #2.
79
80   4- Three work lists are kept.  Statements are only added to these
81      lists if they produce one of SSA_PROP_INTERESTING or
82      SSA_PROP_VARYING.
83
84   	CFG_BLOCKS contains the list of blocks to be simulated.
85	    Blocks are added to this list if their incoming edges are
86	    found executable.
87
88	VARYING_SSA_EDGES contains the list of statements that feed
89	    from statements that produce an SSA_PROP_VARYING result.
90	    These are simulated first to speed up processing.
91
92	INTERESTING_SSA_EDGES contains the list of statements that
93	    feed from statements that produce an SSA_PROP_INTERESTING
94	    result.
95
96   5- Simulation terminates when all three work lists are drained.
97
98   Before calling ssa_propagate, it is important to clear
99   DONT_SIMULATE_AGAIN for all the statements in the program that
100   should be simulated.  This initialization allows an implementation
101   to specify which statements should never be simulated.
102
103   It is also important to compute def-use information before calling
104   ssa_propagate.
105
106   References:
107
108     [1] Constant propagation with conditional branches,
109         Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
110
111     [2] Building an Optimizing Compiler,
112	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
113
114     [3] Advanced Compiler Design and Implementation,
115	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
116
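/* Example usage of this engine (an illustrative sketch only, kept out of
   the build with #if 0).  The callback names and the trivial lattice
   below are hypothetical; a real client such as CCP or copy propagation
   keeps a per-SSA-name lattice and returns SSA_PROP_INTERESTING when a
   value changes to something useful.  */
#if 0
static enum ssa_prop_result
example_visit_stmt (tree stmt ATTRIBUTE_UNUSED, edge *taken_edge_p,
		    tree *output_p)
{
  /* A real client would evaluate STMT against its lattice and, for a
     conditional jump with a known outcome, set *TAKEN_EDGE_P.  This
     degenerate version gives up immediately.  */
  *taken_edge_p = NULL;
  *output_p = NULL_TREE;
  return SSA_PROP_VARYING;
}

static enum ssa_prop_result
example_visit_phi (tree phi ATTRIBUTE_UNUSED)
{
  return SSA_PROP_VARYING;
}

static void
example_propagate (void)
{
  basic_block bb;

  /* As noted above, DONT_SIMULATE_AGAIN must be initialized for every
     statement and PHI node before calling ssa_propagate.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator si;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	DONT_SIMULATE_AGAIN (phi) = 0;

      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
	DONT_SIMULATE_AGAIN (bsi_stmt (si)) = 0;
    }

  ssa_propagate (example_visit_stmt, example_visit_phi);
}
#endif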
117/* Function pointers used to parameterize the propagation engine.  */
118static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
119static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
120
121/* Use the TREE_DEPRECATED bitflag to mark statements that have been
   added to one of the SSA edge worklists.  This flag is used to
   avoid visiting statements unnecessarily when draining an SSA edge
   worklist.  If, while simulating a basic block, we find a statement with
125   STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
126   processing from visiting it again.  */
127#define STMT_IN_SSA_EDGE_WORKLIST(T)	TREE_DEPRECATED (T)
128
129/* A bitmap to keep track of executable blocks in the CFG.  */
130static sbitmap executable_blocks;
131
132/* Array of control flow edges on the worklist.  */
133static VEC(basic_block,heap) *cfg_blocks;
134
/* Number of basic blocks currently on the CFG worklist.  */
static unsigned int cfg_blocks_num = 0;

/* Indices into CFG_BLOCKS of the most recently added block and of the
   next block to be taken off the worklist.  The worklist is managed as
   a circular queue.  */
static int cfg_blocks_tail;
static int cfg_blocks_head;

/* Bitmap of basic blocks that are currently on the CFG worklist.  */
static sbitmap bb_in_list;
140
141/* Worklist of SSA edges which will need reexamination as their
142   definition has changed.  SSA edges are def-use edges in the SSA
143   web.  For each D-U edge, we store the target statement or PHI node
144   U.  */
145static GTY(()) VEC(tree,gc) *interesting_ssa_edges;
146
147/* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
148   list of SSA edges is split into two.  One contains all SSA edges
   that need to be reexamined because their lattice value changed to
150   varying (this worklist), and the other contains all other SSA edges
151   to be reexamined (INTERESTING_SSA_EDGES).
152
153   Since most values in the program are VARYING, the ideal situation
154   is to move them to that lattice value as quickly as possible.
155   Thus, it doesn't make sense to process any other type of lattice
156   value until all VARYING values are propagated fully, which is one
157   thing using the VARYING worklist achieves.  In addition, if we
158   don't use a separate worklist for VARYING edges, we end up with
159   situations where lattice values move from
160   UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
161static GTY(()) VEC(tree,gc) *varying_ssa_edges;
162
163
/* Return true if the block worklist is empty.  */
165
166static inline bool
167cfg_blocks_empty_p (void)
168{
169  return (cfg_blocks_num == 0);
170}
171
172
/* Add a basic block to the worklist.  The block must not already be
174   in the worklist, and it must not be the ENTRY or EXIT block.  */
175
176static void
177cfg_blocks_add (basic_block bb)
178{
179  bool head = false;
180
181  gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
182  gcc_assert (!TEST_BIT (bb_in_list, bb->index));
183
184  if (cfg_blocks_empty_p ())
185    {
186      cfg_blocks_tail = cfg_blocks_head = 0;
187      cfg_blocks_num = 1;
188    }
189  else
190    {
191      cfg_blocks_num++;
192      if (cfg_blocks_num > VEC_length (basic_block, cfg_blocks))
193	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
199	  cfg_blocks_tail = VEC_length (basic_block, cfg_blocks);
200	  cfg_blocks_head = 0;
201	  VEC_safe_grow (basic_block, heap, cfg_blocks, 2 * cfg_blocks_tail);
202	}
203      /* Minor optimization: we prefer to see blocks with more
204	 predecessors later, because there is more of a chance that
205	 the incoming edges will be executable.  */
206      else if (EDGE_COUNT (bb->preds)
207	       >= EDGE_COUNT (VEC_index (basic_block, cfg_blocks,
208					 cfg_blocks_head)->preds))
209	cfg_blocks_tail = ((cfg_blocks_tail + 1)
210			   % VEC_length (basic_block, cfg_blocks));
211      else
212	{
213	  if (cfg_blocks_head == 0)
214	    cfg_blocks_head = VEC_length (basic_block, cfg_blocks);
215	  --cfg_blocks_head;
216	  head = true;
217	}
218    }
219
220  VEC_replace (basic_block, cfg_blocks,
221	       head ? cfg_blocks_head : cfg_blocks_tail,
222	       bb);
223  SET_BIT (bb_in_list, bb->index);
224}
225
226
227/* Remove a block from the worklist.  */
228
229static basic_block
230cfg_blocks_get (void)
231{
232  basic_block bb;
233
234  bb = VEC_index (basic_block, cfg_blocks, cfg_blocks_head);
235
236  gcc_assert (!cfg_blocks_empty_p ());
237  gcc_assert (bb);
238
239  cfg_blocks_head = ((cfg_blocks_head + 1)
240		     % VEC_length (basic_block, cfg_blocks));
241  --cfg_blocks_num;
242  RESET_BIT (bb_in_list, bb->index);
243
244  return bb;
245}
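/* The CFG worklist above is a circular queue over a growable VEC:
   CFG_BLOCKS_HEAD indexes the next block to be taken off the list,
   CFG_BLOCKS_TAIL indexes the block most recently added at the tail,
   and CFG_BLOCKS_NUM counts the live entries.  The self-contained
   sketch below (illustrative only, kept out of the build) models the
   same indexing scheme with a fixed-size array of ints.  */
#if 0
#define RING_SIZE 8
static int ring[RING_SIZE];
static int ring_head, ring_tail, ring_num;

/* Add X at the tail of the queue, as cfg_blocks_add does for blocks
   with at least as many predecessors as the current head block.  */
static void
ring_push_back (int x)
{
  gcc_assert (ring_num < RING_SIZE);
  if (ring_num == 0)
    ring_head = ring_tail = 0;
  else
    ring_tail = (ring_tail + 1) % RING_SIZE;
  ring[ring_tail] = x;
  ring_num++;
}

/* Add X at the head of the queue, as cfg_blocks_add does for blocks
   with fewer predecessors.  */
static void
ring_push_front (int x)
{
  gcc_assert (ring_num < RING_SIZE);
  if (ring_num == 0)
    ring_head = ring_tail = 0;
  else
    {
      if (ring_head == 0)
	ring_head = RING_SIZE;
      ring_head--;
    }
  ring[ring_head] = x;
  ring_num++;
}

/* Remove and return the element at the head, as cfg_blocks_get does.  */
static int
ring_pop_front (void)
{
  int x;
  gcc_assert (ring_num > 0);
  x = ring[ring_head];
  ring_head = (ring_head + 1) % RING_SIZE;
  ring_num--;
  return x;
}
#endif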
246
247
248/* We have just defined a new value for VAR.  If IS_VARYING is true,
249   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
250   them to INTERESTING_SSA_EDGES.  */
251
252static void
253add_ssa_edge (tree var, bool is_varying)
254{
255  imm_use_iterator iter;
256  use_operand_p use_p;
257
258  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
259    {
260      tree use_stmt = USE_STMT (use_p);
261
262      if (!DONT_SIMULATE_AGAIN (use_stmt)
263	  && !STMT_IN_SSA_EDGE_WORKLIST (use_stmt))
264	{
265	  STMT_IN_SSA_EDGE_WORKLIST (use_stmt) = 1;
266	  if (is_varying)
267	    VEC_safe_push (tree, gc, varying_ssa_edges, use_stmt);
268	  else
269	    VEC_safe_push (tree, gc, interesting_ssa_edges, use_stmt);
270	}
271    }
272}
273
274
275/* Add edge E to the control flow worklist.  */
276
277static void
278add_control_edge (edge e)
279{
280  basic_block bb = e->dest;
281  if (bb == EXIT_BLOCK_PTR)
282    return;
283
  /* If the edge has already been marked executable, skip it.  */
285  if (e->flags & EDGE_EXECUTABLE)
286    return;
287
288  e->flags |= EDGE_EXECUTABLE;
289
290  /* If the block is already in the list, we're done.  */
291  if (TEST_BIT (bb_in_list, bb->index))
292    return;
293
294  cfg_blocks_add (bb);
295
296  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n\n",
298	e->src->index, e->dest->index);
299}
300
301
302/* Simulate the execution of STMT and update the work lists accordingly.  */
303
304static void
305simulate_stmt (tree stmt)
306{
307  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
308  edge taken_edge = NULL;
309  tree output_name = NULL_TREE;
310
311  /* Don't bother visiting statements that are already
312     considered varying by the propagator.  */
313  if (DONT_SIMULATE_AGAIN (stmt))
314    return;
315
316  if (TREE_CODE (stmt) == PHI_NODE)
317    {
318      val = ssa_prop_visit_phi (stmt);
319      output_name = PHI_RESULT (stmt);
320    }
321  else
322    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);
323
324  if (val == SSA_PROP_VARYING)
325    {
326      DONT_SIMULATE_AGAIN (stmt) = 1;
327
328      /* If the statement produced a new varying value, add the SSA
329	 edges coming out of OUTPUT_NAME.  */
330      if (output_name)
331	add_ssa_edge (output_name, true);
332
333      /* If STMT transfers control out of its basic block, add
334	 all outgoing edges to the work list.  */
335      if (stmt_ends_bb_p (stmt))
336	{
337	  edge e;
338	  edge_iterator ei;
339	  basic_block bb = bb_for_stmt (stmt);
340	  FOR_EACH_EDGE (e, ei, bb->succs)
341	    add_control_edge (e);
342	}
343    }
344  else if (val == SSA_PROP_INTERESTING)
345    {
      /* If the statement produced a new value, add the SSA edges coming
347	 out of OUTPUT_NAME.  */
348      if (output_name)
349	add_ssa_edge (output_name, false);
350
351      /* If we know which edge is going to be taken out of this block,
352	 add it to the CFG work list.  */
353      if (taken_edge)
354	add_control_edge (taken_edge);
355    }
356}
357
358/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
359   drain.  This pops statements off the given WORKLIST and processes
360   them until there are no more statements on WORKLIST.
361   We take a pointer to WORKLIST because it may be reallocated when an
362   SSA edge is added to it in simulate_stmt.  */
363
364static void
365process_ssa_edge_worklist (VEC(tree,gc) **worklist)
366{
367  /* Drain the entire worklist.  */
368  while (VEC_length (tree, *worklist) > 0)
369    {
370      basic_block bb;
371
372      /* Pull the statement to simulate off the worklist.  */
373      tree stmt = VEC_pop (tree, *worklist);
374
375      /* If this statement was already visited by simulate_block, then
376	 we don't need to visit it again here.  */
377      if (!STMT_IN_SSA_EDGE_WORKLIST (stmt))
378	continue;
379
380      /* STMT is no longer in a worklist.  */
381      STMT_IN_SSA_EDGE_WORKLIST (stmt) = 0;
382
383      if (dump_file && (dump_flags & TDF_DETAILS))
384	{
385	  fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
386	  print_generic_stmt (dump_file, stmt, dump_flags);
387	}
388
389      bb = bb_for_stmt (stmt);
390
391      /* PHI nodes are always visited, regardless of whether or not
392	 the destination block is executable.  Otherwise, visit the
393	 statement only if its block is marked executable.  */
394      if (TREE_CODE (stmt) == PHI_NODE
395	  || TEST_BIT (executable_blocks, bb->index))
396	simulate_stmt (stmt);
397    }
398}
399
400
401/* Simulate the execution of BLOCK.  Evaluate the statement associated
402   with each variable reference inside the block.  */
403
404static void
405simulate_block (basic_block block)
406{
407  tree phi;
408
409  /* There is nothing to do for the exit block.  */
410  if (block == EXIT_BLOCK_PTR)
411    return;
412
413  if (dump_file && (dump_flags & TDF_DETAILS))
414    fprintf (dump_file, "\nSimulating block %d\n", block->index);
415
416  /* Always simulate PHI nodes, even if we have simulated this block
417     before.  */
418  for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
419    simulate_stmt (phi);
420
421  /* If this is the first time we've simulated this block, then we
422     must simulate each of its statements.  */
423  if (!TEST_BIT (executable_blocks, block->index))
424    {
425      block_stmt_iterator j;
426      unsigned int normal_edge_count;
427      edge e, normal_edge;
428      edge_iterator ei;
429
430      /* Note that we have simulated this block.  */
431      SET_BIT (executable_blocks, block->index);
432
433      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
434	{
435	  tree stmt = bsi_stmt (j);
436
437	  /* If this statement is already in the worklist then
438	     "cancel" it.  The reevaluation implied by the worklist
439	     entry will produce the same value we generate here and
440	     thus reevaluating it again from the worklist is
441	     pointless.  */
442	  if (STMT_IN_SSA_EDGE_WORKLIST (stmt))
443	    STMT_IN_SSA_EDGE_WORKLIST (stmt) = 0;
444
445	  simulate_stmt (stmt);
446	}
447
      /* We cannot predict when abnormal edges will be executed, so
449	 once a block is considered executable, we consider any
450	 outgoing abnormal edges as executable.
451
452	 At the same time, if this block has only one successor that is
453	 reached by non-abnormal edges, then add that successor to the
454	 worklist.  */
455      normal_edge_count = 0;
456      normal_edge = NULL;
457      FOR_EACH_EDGE (e, ei, block->succs)
458	{
459	  if (e->flags & EDGE_ABNORMAL)
460	    add_control_edge (e);
461	  else
462	    {
463	      normal_edge_count++;
464	      normal_edge = e;
465	    }
466	}
467
468      if (normal_edge_count == 1)
469	add_control_edge (normal_edge);
470    }
471}
472
473
474/* Initialize local data structures and work lists.  */
475
476static void
477ssa_prop_init (void)
478{
479  edge e;
480  edge_iterator ei;
481  basic_block bb;
482  size_t i;
483
484  /* Worklists of SSA edges.  */
485  interesting_ssa_edges = VEC_alloc (tree, gc, 20);
486  varying_ssa_edges = VEC_alloc (tree, gc, 20);
487
488  executable_blocks = sbitmap_alloc (last_basic_block);
489  sbitmap_zero (executable_blocks);
490
491  bb_in_list = sbitmap_alloc (last_basic_block);
492  sbitmap_zero (bb_in_list);
493
494  if (dump_file && (dump_flags & TDF_DETAILS))
495    dump_immediate_uses (dump_file);
496
497  cfg_blocks = VEC_alloc (basic_block, heap, 20);
498  VEC_safe_grow (basic_block, heap, cfg_blocks, 20);
499
500  /* Initialize the values for every SSA_NAME.  */
501  for (i = 1; i < num_ssa_names; i++)
502    if (ssa_name (i))
503      SSA_NAME_VALUE (ssa_name (i)) = NULL_TREE;
504
  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of ENTRY_BLOCK_PTR).  */
507  FOR_ALL_BB (bb)
508    {
509      block_stmt_iterator si;
510
511      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
512	STMT_IN_SSA_EDGE_WORKLIST (bsi_stmt (si)) = 0;
513
514      FOR_EACH_EDGE (e, ei, bb->succs)
515	e->flags &= ~EDGE_EXECUTABLE;
516    }
517
518  /* Seed the algorithm by adding the successors of the entry block to the
519     edge worklist.  */
520  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
521    add_control_edge (e);
522}
523
524
525/* Free allocated storage.  */
526
527static void
528ssa_prop_fini (void)
529{
530  VEC_free (tree, gc, interesting_ssa_edges);
531  VEC_free (tree, gc, varying_ssa_edges);
532  VEC_free (basic_block, heap, cfg_blocks);
533  cfg_blocks = NULL;
534  sbitmap_free (bb_in_list);
535  sbitmap_free (executable_blocks);
536}
537
538
539/* Get the main expression from statement STMT.  */
540
541tree
542get_rhs (tree stmt)
543{
544  enum tree_code code = TREE_CODE (stmt);
545
546  switch (code)
547    {
548    case RETURN_EXPR:
549      stmt = TREE_OPERAND (stmt, 0);
550      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
551	return stmt;
552      /* FALLTHRU */
553
554    case MODIFY_EXPR:
555      stmt = TREE_OPERAND (stmt, 1);
556      if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
557	return TREE_OPERAND (stmt, 0);
558      else
559	return stmt;
560
561    case COND_EXPR:
562      return COND_EXPR_COND (stmt);
563    case SWITCH_EXPR:
564      return SWITCH_COND (stmt);
565    case GOTO_EXPR:
566      return GOTO_DESTINATION (stmt);
567    case LABEL_EXPR:
568      return LABEL_EXPR_LABEL (stmt);
569
570    default:
571      return stmt;
572    }
573}
574
575
576/* Set the main expression of *STMT_P to EXPR.  If EXPR is not a valid
   GIMPLE expression, no changes are made and the function returns
578   false.  */
579
580bool
581set_rhs (tree *stmt_p, tree expr)
582{
583  tree stmt = *stmt_p, op;
584  enum tree_code code = TREE_CODE (expr);
585  stmt_ann_t ann;
586  tree var;
587  ssa_op_iter iter;
588
589  /* Verify the constant folded result is valid gimple.  */
590  if (TREE_CODE_CLASS (code) == tcc_binary)
591    {
592      if (!is_gimple_val (TREE_OPERAND (expr, 0))
593	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
594	return false;
595    }
596  else if (TREE_CODE_CLASS (code) == tcc_unary)
597    {
598      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
599	return false;
600    }
601  else if (code == ADDR_EXPR)
602    {
603      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ARRAY_REF
604	  && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (expr, 0), 1)))
605	return false;
606    }
607  else if (code == COMPOUND_EXPR
608	   || code == MODIFY_EXPR)
609    return false;
610
611  if (EXPR_HAS_LOCATION (stmt)
612      && EXPR_P (expr)
613      && ! EXPR_HAS_LOCATION (expr)
614      && TREE_SIDE_EFFECTS (expr)
615      && TREE_CODE (expr) != LABEL_EXPR)
616    SET_EXPR_LOCATION (expr, EXPR_LOCATION (stmt));
617
618  switch (TREE_CODE (stmt))
619    {
620    case RETURN_EXPR:
621      op = TREE_OPERAND (stmt, 0);
622      if (TREE_CODE (op) != MODIFY_EXPR)
623	{
624	  TREE_OPERAND (stmt, 0) = expr;
625	  break;
626	}
627      stmt = op;
628      /* FALLTHRU */
629
630    case MODIFY_EXPR:
631      op = TREE_OPERAND (stmt, 1);
632      if (TREE_CODE (op) == WITH_SIZE_EXPR)
633	stmt = op;
634      TREE_OPERAND (stmt, 1) = expr;
635      break;
636
637    case COND_EXPR:
638      if (!is_gimple_condexpr (expr))
639        return false;
640      COND_EXPR_COND (stmt) = expr;
641      break;
642    case SWITCH_EXPR:
643      SWITCH_COND (stmt) = expr;
644      break;
645    case GOTO_EXPR:
646      GOTO_DESTINATION (stmt) = expr;
647      break;
648    case LABEL_EXPR:
649      LABEL_EXPR_LABEL (stmt) = expr;
650      break;
651
652    default:
653      /* Replace the whole statement with EXPR.  If EXPR has no side
654	 effects, then replace *STMT_P with an empty statement.  */
655      ann = stmt_ann (stmt);
656      *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
657      (*stmt_p)->common.ann = (tree_ann_t) ann;
658
659      if (in_ssa_p
660	  && TREE_SIDE_EFFECTS (expr))
661	{
662	  /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
663	     replacement.  */
664	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_DEFS)
665	    {
666	      if (TREE_CODE (var) == SSA_NAME)
667		SSA_NAME_DEF_STMT (var) = *stmt_p;
668	    }
669	}
670      break;
671    }
672
673  return true;
674}
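/* Clients typically use get_rhs/set_rhs together: fetch the main
   expression of a statement, simplify it, and install the result only
   if set_rhs accepts it as valid GIMPLE.  A minimal sketch of that
   pattern follows (illustrative only, kept out of the build; the
   folding policy shown is just an example).  */
#if 0
static bool
example_fold_stmt_rhs (tree *stmt_p)
{
  tree rhs = get_rhs (*stmt_p);
  tree folded;

  if (rhs == NULL_TREE)
    return false;

  folded = fold (rhs);

  /* Only install the folded form if it is a GIMPLE invariant and
     set_rhs is able to put it back into *STMT_P.  */
  if (folded != rhs
      && is_gimple_min_invariant (folded)
      && set_rhs (stmt_p, folded))
    return true;

  return false;
}
#endif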
675
676
677/* Entry point to the propagation engine.
678
679   VISIT_STMT is called for every statement visited.
680   VISIT_PHI is called for every PHI node visited.  */
681
682void
683ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
684	       ssa_prop_visit_phi_fn visit_phi)
685{
686  ssa_prop_visit_stmt = visit_stmt;
687  ssa_prop_visit_phi = visit_phi;
688
689  ssa_prop_init ();
690
691  /* Iterate until the worklists are empty.  */
692  while (!cfg_blocks_empty_p ()
693	 || VEC_length (tree, interesting_ssa_edges) > 0
694	 || VEC_length (tree, varying_ssa_edges) > 0)
695    {
696      if (!cfg_blocks_empty_p ())
697	{
698	  /* Pull the next block to simulate off the worklist.  */
699	  basic_block dest_block = cfg_blocks_get ();
700	  simulate_block (dest_block);
701	}
702
703      /* In order to move things to varying as quickly as
	 possible, process the VARYING_SSA_EDGES worklist first.  */
705      process_ssa_edge_worklist (&varying_ssa_edges);
706
707      /* Now process the INTERESTING_SSA_EDGES worklist.  */
708      process_ssa_edge_worklist (&interesting_ssa_edges);
709    }
710
711  ssa_prop_fini ();
712}
713
714
715/* Return the first V_MAY_DEF or V_MUST_DEF operand for STMT.  */
716
717tree
718first_vdef (tree stmt)
719{
720  ssa_op_iter iter;
721  tree op;
722
723  /* Simply return the first operand we arrive at.  */
724  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
725    return (op);
726
727  gcc_unreachable ();
728}
729
730
731/* Return true if STMT is of the form 'LHS = mem_ref', where 'mem_ref'
732   is a non-volatile pointer dereference, a structure reference or a
733   reference to a single _DECL.  Ignore volatile memory references
734   because they are not interesting for the optimizers.  */
735
736bool
737stmt_makes_single_load (tree stmt)
738{
739  tree rhs;
740
741  if (TREE_CODE (stmt) != MODIFY_EXPR)
742    return false;
743
744  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VUSE))
745    return false;
746
747  rhs = TREE_OPERAND (stmt, 1);
748  STRIP_NOPS (rhs);
749
750  return (!TREE_THIS_VOLATILE (rhs)
751	  && (DECL_P (rhs)
752	      || REFERENCE_CLASS_P (rhs)));
753}
754
755
756/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
757   is a non-volatile pointer dereference, a structure reference or a
758   reference to a single _DECL.  Ignore volatile memory references
759   because they are not interesting for the optimizers.  */
760
761bool
762stmt_makes_single_store (tree stmt)
763{
764  tree lhs;
765
766  if (TREE_CODE (stmt) != MODIFY_EXPR)
767    return false;
768
769  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF))
770    return false;
771
772  lhs = TREE_OPERAND (stmt, 0);
773  STRIP_NOPS (lhs);
774
775  return (!TREE_THIS_VOLATILE (lhs)
776          && (DECL_P (lhs)
777	      || REFERENCE_CLASS_P (lhs)));
778}
779
780
781/* If STMT makes a single memory load and all the virtual use operands
782   have the same value in array VALUES, return it.  Otherwise, return
783   NULL.  */
784
785prop_value_t *
786get_value_loaded_by (tree stmt, prop_value_t *values)
787{
788  ssa_op_iter i;
789  tree vuse;
790  prop_value_t *prev_val = NULL;
791  prop_value_t *val = NULL;
792
793  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
794    {
795      val = &values[SSA_NAME_VERSION (vuse)];
796      if (prev_val && prev_val->value != val->value)
797	return NULL;
798      prev_val = val;
799    }
800
801  return val;
802}
803
804
805/* Propagation statistics.  */
806struct prop_stats_d
807{
808  long num_const_prop;
809  long num_copy_prop;
810  long num_pred_folded;
811};
812
813static struct prop_stats_d prop_stats;
814
815/* Replace USE references in statement STMT with the values stored in
816   PROP_VALUE. Return true if at least one reference was replaced.  If
817   REPLACED_ADDRESSES_P is given, it will be set to true if an address
818   constant was replaced.  */
819
820bool
821replace_uses_in (tree stmt, bool *replaced_addresses_p,
822		 prop_value_t *prop_value)
823{
824  bool replaced = false;
825  use_operand_p use;
826  ssa_op_iter iter;
827
828  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
829    {
830      tree tuse = USE_FROM_PTR (use);
831      tree val = prop_value[SSA_NAME_VERSION (tuse)].value;
832
833      if (val == tuse || val == NULL_TREE)
834	continue;
835
836      if (TREE_CODE (stmt) == ASM_EXPR
837	  && !may_propagate_copy_into_asm (tuse))
838	continue;
839
840      if (!may_propagate_copy (tuse, val))
841	continue;
842
843      if (TREE_CODE (val) != SSA_NAME)
844	prop_stats.num_const_prop++;
845      else
846	prop_stats.num_copy_prop++;
847
848      propagate_value (use, val);
849
850      replaced = true;
851      if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
852	*replaced_addresses_p = true;
853    }
854
855  return replaced;
856}
857
858
859/* Replace the VUSE references in statement STMT with the values
860   stored in PROP_VALUE.  Return true if a reference was replaced.  If
861   REPLACED_ADDRESSES_P is given, it will be set to true if an address
862   constant was replaced.
863
864   Replacing VUSE operands is slightly more complex than replacing
865   regular USEs.  We are only interested in two types of replacements
866   here:
867
868   1- If the value to be replaced is a constant or an SSA name for a
869      GIMPLE register, then we are making a copy/constant propagation
870      from a memory store.  For instance,
871
872      	# a_3 = V_MAY_DEF <a_2>
873	a.b = x_1;
874	...
875 	# VUSE <a_3>
876	y_4 = a.b;
877
878      This replacement is only possible iff STMT is an assignment
879      whose RHS is identical to the LHS of the statement that created
880      the VUSE(s) that we are replacing.  Otherwise, we may do the
881      wrong replacement:
882
883      	# a_3 = V_MAY_DEF <a_2>
884	# b_5 = V_MAY_DEF <b_4>
885	*p = 10;
886	...
887	# VUSE <b_5>
888	x_8 = b;
889
890      Even though 'b_5' acquires the value '10' during propagation,
891      there is no way for the propagator to tell whether the
892      replacement is correct in every reached use, because values are
893      computed at definition sites.  Therefore, when doing final
894      substitution of propagated values, we have to check each use
895      site.  Since the RHS of STMT ('b') is different from the LHS of
896      the originating statement ('*p'), we cannot replace 'b' with
897      '10'.
898
899      Similarly, when merging values from PHI node arguments,
900      propagators need to take care not to merge the same values
901      stored in different locations:
902
903     		if (...)
904		  # a_3 = V_MAY_DEF <a_2>
905		  a.b = 3;
906		else
907		  # a_4 = V_MAY_DEF <a_2>
908		  a.c = 3;
909		# a_5 = PHI <a_3, a_4>
910
911      It would be wrong to propagate '3' into 'a_5' because that
912      operation merges two stores to different memory locations.
913
914
915   2- If the value to be replaced is an SSA name for a virtual
916      register, then we simply replace each VUSE operand with its
917      value from PROP_VALUE.  This is the same replacement done by
918      replace_uses_in.  */
919
920static bool
921replace_vuses_in (tree stmt, bool *replaced_addresses_p,
922                  prop_value_t *prop_value)
923{
924  bool replaced = false;
925  ssa_op_iter iter;
926  use_operand_p vuse;
927
928  if (stmt_makes_single_load (stmt))
929    {
930      /* If STMT is an assignment whose RHS is a single memory load,
931	 see if we are trying to propagate a constant or a GIMPLE
932	 register (case #1 above).  */
933      prop_value_t *val = get_value_loaded_by (stmt, prop_value);
934      tree rhs = TREE_OPERAND (stmt, 1);
935
936      if (val
937	  && val->value
938	  && (is_gimple_reg (val->value)
939	      || is_gimple_min_invariant (val->value))
940	  && simple_cst_equal (rhs, val->mem_ref) == 1)
942	{
943	  /* If we are replacing a constant address, inform our
944	     caller.  */
945	  if (TREE_CODE (val->value) != SSA_NAME
946	      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1)))
947	      && replaced_addresses_p)
948	    *replaced_addresses_p = true;
949
950	  /* We can only perform the substitution if the load is done
951	     from the same memory location as the original store.
952	     Since we already know that there are no intervening
953	     stores between DEF_STMT and STMT, we only need to check
954	     that the RHS of STMT is the same as the memory reference
955	     propagated together with the value.  */
956	  TREE_OPERAND (stmt, 1) = val->value;
957
958	  if (TREE_CODE (val->value) != SSA_NAME)
959	    prop_stats.num_const_prop++;
960	  else
961	    prop_stats.num_copy_prop++;
962
963	  /* Since we have replaced the whole RHS of STMT, there
964	     is no point in checking the other VUSEs, as they will
965	     all have the same value.  */
966	  return true;
967	}
968    }
969
970  /* Otherwise, the values for every VUSE operand must be other
971     SSA_NAMEs that can be propagated into STMT.  */
972  FOR_EACH_SSA_USE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
973    {
974      tree var = USE_FROM_PTR (vuse);
975      tree val = prop_value[SSA_NAME_VERSION (var)].value;
976
977      if (val == NULL_TREE || var == val)
978	continue;
979
980      /* Constants and copies propagated between real and virtual
981	 operands are only possible in the cases handled above.  They
982	 should be ignored in any other context.  */
983      if (is_gimple_min_invariant (val) || is_gimple_reg (val))
984	continue;
985
986      propagate_value (vuse, val);
987      prop_stats.num_copy_prop++;
988      replaced = true;
989    }
990
991  return replaced;
992}
993
994
995/* Replace propagated values into all the arguments for PHI using the
996   values from PROP_VALUE.  */
997
998static void
999replace_phi_args_in (tree phi, prop_value_t *prop_value)
1000{
1001  int i;
1002  bool replaced = false;
1003  tree prev_phi = NULL;
1004
1005  if (dump_file && (dump_flags & TDF_DETAILS))
1006    prev_phi = unshare_expr (phi);
1007
1008  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1009    {
1010      tree arg = PHI_ARG_DEF (phi, i);
1011
1012      if (TREE_CODE (arg) == SSA_NAME)
1013	{
1014	  tree val = prop_value[SSA_NAME_VERSION (arg)].value;
1015
1016	  if (val && val != arg && may_propagate_copy (arg, val))
1017	    {
1018	      if (TREE_CODE (val) != SSA_NAME)
1019		prop_stats.num_const_prop++;
1020	      else
1021		prop_stats.num_copy_prop++;
1022
1023	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
1024	      replaced = true;
1025
1026	      /* If we propagated a copy and this argument flows
1027		 through an abnormal edge, update the replacement
1028		 accordingly.  */
1029	      if (TREE_CODE (val) == SSA_NAME
1030		  && PHI_ARG_EDGE (phi, i)->flags & EDGE_ABNORMAL)
1031		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1032	    }
1033	}
1034    }
1035
1036  if (replaced && dump_file && (dump_flags & TDF_DETAILS))
1037    {
1038      fprintf (dump_file, "Folded PHI node: ");
1039      print_generic_stmt (dump_file, prev_phi, TDF_SLIM);
1040      fprintf (dump_file, "           into: ");
1041      print_generic_stmt (dump_file, phi, TDF_SLIM);
1042      fprintf (dump_file, "\n");
1043    }
1044}
1045
1046
1047/* If STMT has a predicate whose value can be computed using the value
1048   range information computed by VRP, compute its value and return true.
1049   Otherwise, return false.  */
1050
1051static bool
1052fold_predicate_in (tree stmt)
1053{
1054  tree *pred_p = NULL;
1055  bool modify_expr_p = false;
1056  tree val;
1057
1058  if (TREE_CODE (stmt) == MODIFY_EXPR
1059      && COMPARISON_CLASS_P (TREE_OPERAND (stmt, 1)))
1060    {
1061      modify_expr_p = true;
1062      pred_p = &TREE_OPERAND (stmt, 1);
1063    }
1064  else if (TREE_CODE (stmt) == COND_EXPR)
1065    pred_p = &COND_EXPR_COND (stmt);
1066  else
1067    return false;
1068
1069  val = vrp_evaluate_conditional (*pred_p, stmt);
1070  if (val)
1071    {
1072      if (modify_expr_p)
1073        val = fold_convert (TREE_TYPE (*pred_p), val);
1074
1075      if (dump_file)
1076	{
1077	  fprintf (dump_file, "Folding predicate ");
1078	  print_generic_expr (dump_file, *pred_p, 0);
1079	  fprintf (dump_file, " to ");
1080	  print_generic_expr (dump_file, val, 0);
1081	  fprintf (dump_file, "\n");
1082	}
1083
1084      prop_stats.num_pred_folded++;
1085      *pred_p = val;
1086      return true;
1087    }
1088
1089  return false;
1090}
1091
1092
1093/* Perform final substitution and folding of propagated values.
1094
1095   PROP_VALUE[I] contains the single value that should be substituted
1096   at every use of SSA name N_I.  If PROP_VALUE is NULL, no values are
1097   substituted.
1098
1099   If USE_RANGES_P is true, statements that contain predicate
1100   expressions are evaluated with a call to vrp_evaluate_conditional.
1101   This will only give meaningful results when called from tree-vrp.c
1102   (the information used by vrp_evaluate_conditional is built by the
1103   VRP pass).  */
1104
1105void
1106substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
1107{
1108  basic_block bb;
1109
1110  if (prop_value == NULL && !use_ranges_p)
1111    return;
1112
1113  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
1115
1116  memset (&prop_stats, 0, sizeof (prop_stats));
1117
1118  /* Substitute values in every statement of every basic block.  */
1119  FOR_EACH_BB (bb)
1120    {
1121      block_stmt_iterator i;
1122      tree phi;
1123
1124      /* Propagate known values into PHI nodes.  */
1125      if (prop_value)
1126	for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1127	  replace_phi_args_in (phi, prop_value);
1128
1129      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1130	{
1131          bool replaced_address, did_replace;
1132	  tree prev_stmt = NULL;
1133	  tree stmt = bsi_stmt (i);
1134
1135	  /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
1136	     range information for names and they are discarded
1137	     afterwards.  */
1138	  if (TREE_CODE (stmt) == MODIFY_EXPR
1139	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
1140	    continue;
1141
1142	  /* Replace the statement with its folded version and mark it
1143	     folded.  */
1144	  did_replace = false;
1145	  replaced_address = false;
1146	  if (dump_file && (dump_flags & TDF_DETAILS))
1147	    prev_stmt = unshare_expr (stmt);
1148
1149	  /* If we have range information, see if we can fold
1150	     predicate expressions.  */
1151	  if (use_ranges_p)
1152	    did_replace = fold_predicate_in (stmt);
1153
1154	  if (prop_value)
1155	    {
1156	      /* Only replace real uses if we couldn't fold the
1157		 statement using value range information (value range
1158		 information is not collected on virtuals, so we only
1159		 need to check this for real uses).  */
1160	      if (!did_replace)
1161		did_replace |= replace_uses_in (stmt, &replaced_address,
1162		                                prop_value);
1163
1164	      did_replace |= replace_vuses_in (stmt, &replaced_address,
1165		                               prop_value);
1166	    }
1167
1168	  /* If we made a replacement, fold and cleanup the statement.  */
1169	  if (did_replace)
1170	    {
1171	      tree old_stmt = stmt;
1172	      tree rhs;
1173
1174	      fold_stmt (bsi_stmt_ptr (i));
1175	      stmt = bsi_stmt (i);
1176
1177	      /* If we folded a builtin function, we'll likely
1178		 need to rename VDEFs.  */
1179	      mark_new_vars_to_rename (stmt);
1180
1181              /* If we cleaned up EH information from the statement,
1182                 remove EH edges.  */
1183	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
1184		tree_purge_dead_eh_edges (bb);
1185
1186	      rhs = get_rhs (stmt);
1187	      if (TREE_CODE (rhs) == ADDR_EXPR)
1188		recompute_tree_invariant_for_addr_expr (rhs);
1189
1190	      if (dump_file && (dump_flags & TDF_DETAILS))
1191		{
1192		  fprintf (dump_file, "Folded statement: ");
1193		  print_generic_stmt (dump_file, prev_stmt, TDF_SLIM);
1194		  fprintf (dump_file, "            into: ");
1195		  print_generic_stmt (dump_file, stmt, TDF_SLIM);
1196		  fprintf (dump_file, "\n");
1197		}
1198	    }
1199
1200	  /* Some statements may be simplified using ranges.  For
1201	     example, division may be replaced by shifts, modulo
	     replaced with bitwise AND, etc.  Do this after
	     substituting constants, folding, etc., so that we're
1204	     presented with a fully propagated, canonicalized
1205	     statement.  */
1206	  if (use_ranges_p)
1207	    simplify_stmt_using_ranges (stmt);
1208
1209	}
1210    }
1211
1212  if (dump_file && (dump_flags & TDF_STATS))
1213    {
1214      fprintf (dump_file, "Constants propagated: %6ld\n",
1215	       prop_stats.num_const_prop);
1216      fprintf (dump_file, "Copies propagated:    %6ld\n",
1217	       prop_stats.num_copy_prop);
1218      fprintf (dump_file, "Predicates folded:    %6ld\n",
1219	       prop_stats.num_pred_folded);
1220    }
1221}
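/* A typical way for a propagator to call substitute_and_fold: collect
   its final values into a prop_value_t array indexed by SSA version
   and hand that to the substitution code.  The sketch below is
   illustrative only (kept out of the build); it assumes the client
   stored its results in SSA_NAME_VALUE and propagates registers and
   constants only, so mem_ref is left NULL.  */
#if 0
static void
example_finalize (void)
{
  size_t i;
  prop_value_t *values = XCNEWVEC (prop_value_t, num_ssa_names);

  /* A zeroed entry means "no known value" for that SSA name.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name && SSA_NAME_VALUE (name))
	values[i].value = SSA_NAME_VALUE (name);
    }

  /* Not running inside VRP, so do not request range-based folding.  */
  substitute_and_fold (values, false);
  free (values);
}
#endif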
1222
1223#include "gt-tree-ssa-propagate.h"
1224