/* SSA Jump Threading
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Jeff Law  <law@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
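
/* Jump threading looks for blocks whose ending control statement has
   a known outcome when the block is entered along a particular
   incoming edge.  As a purely illustrative sketch (the SSA names and
   structure below are hypothetical, not taken from any testcase):

     if (a_1 > 10)
       x_2 = 1;
     else
       x_3 = 0;
     x_4 = PHI <x_2, x_3>;
     if (x_4 != 0)
       foo ();

   When the join block is entered from the THEN arm, x_4 is known to
   be 1, so that path can be redirected straight to the call of
   foo (), bypassing the second conditional entirely.  */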

/* To avoid code explosion due to jump threading, we limit the
   number of statements we are going to copy.  This variable
   holds the number of statements currently seen that we'll have
   to copy as part of the jump threading process.  */
static int stmt_count;

/* Return TRUE if we may be able to thread an incoming edge into
   BB to an outgoing edge from BB.  Return FALSE otherwise.  */

bool
potentially_threadable_block (basic_block bb)
{
  block_stmt_iterator bsi;

  /* If BB has a single successor or a single predecessor, then
     there is no threading opportunity.  */
  if (single_succ_p (bb) || single_pred_p (bb))
    return false;

  /* If BB does not end with a conditional, switch or computed goto,
     then there is no threading opportunity.  */
  bsi = bsi_last (bb);
  if (bsi_end_p (bsi)
      || ! bsi_stmt (bsi)
      || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
	  && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
	  && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
    return false;

  return true;
}

/* Return the LHS of any ASSERT_EXPR where OP appears as the first
   argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates
   BB.  If no such ASSERT_EXPR is found, return OP.  */
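
/* As a hypothetical example (the SSA names are illustrative only):
   if OP is x_1 and a block dominating BB contains

     x_2 = ASSERT_EXPR <x_1, x_1 > 0>

   then this function returns x_2, which carries the range
   information implied by the assertion; otherwise it returns x_1.  */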

static tree
lhs_of_dominating_assert (tree op, basic_block bb, tree stmt)
{
  imm_use_iterator imm_iter;
  tree use_stmt;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
          && TREE_CODE (use_stmt) == MODIFY_EXPR
          && TREE_CODE (TREE_OPERAND (use_stmt, 1)) == ASSERT_EXPR
          && TREE_OPERAND (TREE_OPERAND (use_stmt, 1), 0) == op
	  && dominated_by_p (CDI_DOMINATORS, bb, bb_for_stmt (use_stmt)))
	{
	  return TREE_OPERAND (use_stmt, 0);
	}
    }
  return op;
}


/* We record temporary equivalences created by PHI nodes or
   statements within the target block.  Doing so allows us to
   identify more jump threading opportunities, even in blocks
   with side effects.

   We keep track of those temporary equivalences in a stack
   structure so that we can unwind them when we're done processing
   a particular edge.  This routine handles unwinding the data
   structures.  */
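
/* As an illustration (hypothetical SSA names): after equivalences
   are recorded for a_1 and then b_2 on top of a NULL marker pushed
   by a caller, the stack holds, from bottom to top:

     ... NULL prev_a a_1 prev_b b_2

   Unwinding restores SSA_NAME_VALUE for b_2 and then a_1, and stops
   as soon as the NULL marker is popped.  */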

static void
remove_temporary_equivalences (VEC(tree, heap) **stack)
{
  while (VEC_length (tree, *stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, *stack);

      /* A NULL value indicates we should stop unwinding, otherwise
	 pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
	break;

      prev_value = VEC_pop (tree, *stack);
      SSA_NAME_VALUE (dest) = prev_value;
    }
}

/* Record a temporary equivalence, saving enough information so that
   we can restore the state of recorded equivalences when we're
   done processing the current edge.  */

static void
record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
{
  tree prev_x = SSA_NAME_VALUE (x);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  SSA_NAME_VALUE (x) = y;
  VEC_reserve (tree, heap, *stack, 2);
  VEC_quick_push (tree, *stack, prev_x);
  VEC_quick_push (tree, *stack, x);
}

/* Record temporary equivalences created by PHIs at the target of the
   edge E.  Record unwind information for the equivalences onto STACK.

   If a PHI which prevents threading is encountered, then return FALSE
   indicating we should not thread this edge, else return TRUE.  */
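
/* For example (with purely illustrative names), if E->dest contains

     x_5 = PHI <x_3 (E), x_4 (E')>

   then traversing E lets us temporarily record the equivalence
   x_5 = x_3, which may help simplify later statements in E->dest.  */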

static bool
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
{
  tree phi;

  /* Each PHI creates a temporary equivalence; record them.
     These are context sensitive equivalences and will be removed
     later.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);

      /* If the desired argument is not the same as this PHI's result
	 and it is set by a PHI in E->dest, then we can not thread
	 through E->dest.  */
      if (src != dst
	  && TREE_CODE (src) == SSA_NAME
	  && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
	  && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
	return false;

      /* We consider any non-virtual PHI as a statement since it
	 could result in a constant assignment or copy operation.  */
      if (is_gimple_reg (dst))
	stmt_count++;

      record_temporary_equivalence (dst, src, stack);
    }
  return true;
}

/* Try to simplify each statement in E->dest, ultimately leading to
   a simplification of the COND_EXPR at the end of E->dest.

   Record unwind information for temporary equivalences onto STACK.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   statements using pass specific information.

   We might consider marking just those statements which ultimately
   feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
   would be recovered by trying to simplify fewer statements.

   If we are able to simplify a statement into the form
   SSA_NAME = (SSA_NAME | gimple invariant), then we can record
   a context sensitive equivalency which may help us simplify
   later statements in E->dest.  */
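
/* A sketch of the simplification we hope for (names illustrative
   only): if traversing E already recorded a_1 = 5, then for the
   statement

     b_2 = a_1 + 3;

   we temporarily propagate 5 into the use of a_1, fold 5 + 3 to 8,
   and record the context sensitive equivalence b_2 = 8.  */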

static tree
record_temporary_equivalences_from_stmts_at_dest (edge e,
						  VEC(tree, heap) **stack,
						  tree (*simplify) (tree,
								    tree))
{
  block_stmt_iterator bsi;
  tree stmt = NULL;
  int max_stmt_count;

  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);

  /* Walk through each statement in the block recording equivalences
     we discover.  Note any equivalences we discover are context
     sensitive (ie, are dependent on traversing E) and must be unwound
     when we're finished processing E.  */
  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree cached_lhs = NULL;

      stmt = bsi_stmt (bsi);

      /* Ignore empty statements and labels.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* If the statement has volatile operands, then we assume we
	 can not thread through this block.  This is overly
	 conservative in some ways.  */
      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
	return NULL;

      /* If duplicating this block is going to cause too much code
	 expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
	return NULL;

      /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
	 value, then do not try to simplify this statement as it will
	 not simplify in any way that is helpful for jump threading.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
	continue;

      /* At this point we have a statement which assigns an RHS to an
	 SSA_VAR on the LHS.  We want to try and simplify this statement
	 to expose more context sensitive equivalences which in turn may
	 allow us to simplify the conditional at the end of the block.

	 Handle simple copy operations as well as implied copies from
	 ASSERT_EXPRs.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
	cached_lhs = TREE_OPERAND (stmt, 1);
      else if (TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
	cached_lhs = TREE_OPERAND (TREE_OPERAND (stmt, 1), 0);
      else
	{
	  /* A statement that is not a trivial copy or ASSERT_EXPR.
	     We're going to temporarily copy propagate the operands
	     and see if that allows us to simplify this statement.  */
	  tree *copy, pre_fold_expr;
	  ssa_op_iter iter;
	  use_operand_p use_p;
	  unsigned int num, i = 0;

	  num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
	  copy = XCNEWVEC (tree, num);

	  /* Make a copy of the uses & vuses into COPY, then cprop into
	     the operands.  */
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    {
	      tree tmp = NULL;
	      tree use = USE_FROM_PTR (use_p);

	      copy[i++] = use;
	      if (TREE_CODE (use) == SSA_NAME)
		tmp = SSA_NAME_VALUE (use);
	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
		SET_USE (use_p, tmp);
	    }

	  /* Try to fold/lookup the new expression.  Inserting the
	     expression into the hash table is unlikely to help.
	     Sadly, we have to handle conditional assignments specially
	     here, because fold expects all the operands of an expression
	     to be folded before the expression itself is folded, but we
	     can't just substitute the folded condition here.  */
	  if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
	    {
	      tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
	      cond = fold (cond);
	      if (cond == boolean_true_node)
		pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
	      else if (cond == boolean_false_node)
		pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
	      else
		pre_fold_expr = TREE_OPERAND (stmt, 1);
	    }
	  else
	    pre_fold_expr = TREE_OPERAND (stmt, 1);

	  if (pre_fold_expr)
	    {
	      cached_lhs = fold (pre_fold_expr);
	      if (TREE_CODE (cached_lhs) != SSA_NAME
		  && !is_gimple_min_invariant (cached_lhs))
	        cached_lhs = (*simplify) (stmt, stmt);
	    }

	  /* Restore the statement's original uses/vuses.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    SET_USE (use_p, copy[i++]);

	  free (copy);
	}

      /* Record the context sensitive equivalence if we were able
	 to simplify this statement.  */
      if (cached_lhs
	  && (TREE_CODE (cached_lhs) == SSA_NAME
	      || is_gimple_min_invariant (cached_lhs)))
	record_temporary_equivalence (TREE_OPERAND (stmt, 0),
				      cached_lhs,
				      stack);
    }
  return stmt;
}

/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch COND_EXPR
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.  */
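
/* Continuing the earlier sketch (names illustrative only): if we
   recorded b_2 = 8 and E->dest ends with

     if (b_2 > 5) ...

   then substituting 8 for b_2 folds the condition to true, so the
   outgoing edge taken when traversing E is known.  */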

static tree
simplify_control_stmt_condition (edge e,
				 tree stmt,
				 tree dummy_cond,
				 tree (*simplify) (tree, tree),
				 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;

  if (TREE_CODE (stmt) == COND_EXPR)
    cond = COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == GOTO_EXPR)
    cond = GOTO_DESTINATION (stmt);
  else
    cond = SWITCH_COND (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (COMPARISON_CLASS_P (cond))
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = TREE_OPERAND (cond, 0);
      op1 = TREE_OPERAND (cond, 1);
      cond_code = TREE_CODE (cond);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
          tree tmp = SSA_NAME_VALUE (op0);
	  if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
	    op0 = tmp;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op1);
	  if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
	    op1 = tmp;
	}

      if (handle_dominating_asserts)
	{
	  /* Now see if the operand was consumed by an ASSERT_EXPR
	     which dominates E->src.  If so, we want to replace the
	     operand with the LHS of the ASSERT_EXPR.  */
	  if (TREE_CODE (op0) == SSA_NAME)
	    op0 = lhs_of_dominating_assert (op0, e->src, stmt);

	  if (TREE_CODE (op1) == SSA_NAME)
	    op1 = lhs_of_dominating_assert (op1, e->src, stmt);
	}

      /* We may need to canonicalize the comparison.  For
	 example, op0 might be a constant while op1 is an
	 SSA_NAME.  Failure to canonicalize will cause us to
	 miss threading opportunities.  */
      if (cond_code != SSA_NAME
	  && tree_swap_operands_p (op0, op1, false))
	{
	  tree tmp;
	  cond_code = swap_tree_comparison (TREE_CODE (cond));
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      /* Stuff the operator and operands into our dummy conditional
	 expression.  */
      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;

      /* We absolutely do not care about any type conversions; we only
	 care about a zero/nonzero value.  */
      fold_defer_overflow_warnings ();

      cached_lhs = fold (COND_EXPR_COND (dummy_cond));
      while (TREE_CODE (cached_lhs) == NOP_EXPR
	     || TREE_CODE (cached_lhs) == CONVERT_EXPR
	     || TREE_CODE (cached_lhs) == NON_LVALUE_EXPR)
	cached_lhs = TREE_OPERAND (cached_lhs, 0);

      fold_undefer_overflow_warnings (is_gimple_min_invariant (cached_lhs),
				      stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);

      /* If we have not simplified the condition down to an invariant,
	 then use the pass specific callback to simplify the condition.  */
      if (! is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (dummy_cond, stmt);
    }

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalency chains.

	 It is possible to get loops in the SSA_NAME_VALUE chains
	 (consider threading the backedge of a loop where we have
	 a loop invariant SSA_NAME used in the condition).  */
      if (cached_lhs
	  && TREE_CODE (cached_lhs) == SSA_NAME
	  && SSA_NAME_VALUE (cached_lhs))
	cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
	 update CACHED_LHS appropriately.  */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
	cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
	 pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (stmt, stmt);
    }
  else
    cached_lhs = NULL;

  return cached_lhs;
}

/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   may have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.  */
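
/* As a purely illustrative case, given a loop such as

     do
       {
	 flag_1 = PHI <1 (preheader), 0 (latch)>
	 if (flag_1)
	   first_iteration_work ();
	 ...
       }
     while (cond);

   the conditional in the loop's first block is always false when it
   is reached via the backedge, so threading across the backedge can
   still be profitable.  */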

void
thread_across_edge (tree dummy_cond,
		    edge e,
		    bool handle_dominating_asserts,
		    VEC(tree, heap) **stack,
		    tree (*simplify) (tree, tree))
{
  tree stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      ssa_op_iter iter;
      use_operand_p use_p;
      tree last = bsi_stmt (bsi_last (e->dest));

      FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
	{
	  tree use = USE_FROM_PTR (use_p);

          if (TREE_CODE (use) == SSA_NAME
	      && TREE_CODE (SSA_NAME_DEF_STMT (use)) != PHI_NODE
	      && bb_for_stmt (SSA_NAME_DEF_STMT (use)) == e->dest)
	    goto fail;
	}
    }

  stmt_count = 0;

  /* PHIs create temporary equivalences.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR, GOTO_EXPR or SWITCH_EXPR, see if
     we know which arm will be taken.  */
  if (TREE_CODE (stmt) == COND_EXPR
      || TREE_CODE (stmt) == GOTO_EXPR
      || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
					      handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
	{
	  edge taken_edge = find_taken_edge (e->dest, cond);
	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);

	  if (dest == e->dest)
	    goto fail;

	  remove_temporary_equivalences (stack);
	  register_jump_thread (e, taken_edge);
	}
    }

 fail:
  remove_temporary_equivalences (stack);
}