1/* Conditional constant propagation pass for the GNU compiler.
2   Copyright (C) 2000-2015 Free Software Foundation, Inc.
3   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it
9under the terms of the GNU General Public License as published by the
10Free Software Foundation; either version 3, or (at your option) any
11later version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT
14ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22/* Conditional constant propagation (CCP) is based on the SSA
23   propagation engine (tree-ssa-propagate.c).  Constant assignments of
24   the form VAR = CST are propagated from the assignments into uses of
25   VAR, which in turn may generate new constants.  The simulation uses
26   a four level lattice to keep track of constant values associated
27   with SSA names.  Given an SSA name V_i, it may take one of the
28   following values:
29
30	UNINITIALIZED   ->  the initial state of the value.  This value
31			    is replaced with a correct initial value
32			    the first time the value is used, so the
33			    rest of the pass does not need to care about
34			    it.  Using this value simplifies initialization
35			    of the pass, and prevents us from needlessly
36			    scanning statements that are never reached.
37
38	UNDEFINED	->  V_i is a local variable whose definition
39			    has not been processed yet.  Therefore we
40			    don't yet know if its value is a constant
41			    or not.
42
43	CONSTANT	->  V_i has been found to hold a constant
44			    value C.
45
46	VARYING		->  V_i cannot take a constant value, or if it
47			    does, it is not possible to determine it
48			    at compile time.
49
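   As a small illustrative example, given

			x_1 = 4;
			y_2 = x_1 + 6;

   visiting the first statement moves x_1 from UNDEFINED to CONSTANT 4;
   visiting the second statement then folds x_1 + 6 with that constant
   and moves y_2 to CONSTANT 10.  Lattice values only ever move towards
   VARYING; once a name has been marked VARYING it never becomes
   CONSTANT again.
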
50   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
51
52   1- In ccp_visit_stmt, we are interested in assignments whose RHS
53      evaluates into a constant and conditional jumps whose predicate
54      evaluates into a boolean true or false.  When an assignment of
55      the form V_i = CONST is found, V_i's lattice value is set to
56      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
58      assignment into the worklists, so that statements that use V_i
59      can be visited.
60
61      If the statement is a conditional with a constant predicate, we
62      mark the outgoing edges as executable or not executable
63      depending on the predicate's value.  This is then used when
64      visiting PHI nodes to know when a PHI argument can be ignored.
65
66
67   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68      same constant C, then the LHS of the PHI is set to C.  This
69      evaluation is known as the "meet operation".  Since one of the
70      goals of this evaluation is to optimistically return constant
71      values as often as possible, it uses two main short cuts:
72
73      - If an argument is flowing in through a non-executable edge, it
74	is ignored.  This is useful in cases like this:
75
76			if (PRED)
77			  a_9 = 3;
78			else
79			  a_10 = 100;
80			a_11 = PHI (a_9, a_10)
81
82	If PRED is known to always evaluate to false, then we can
83	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
85	different values), we can consider it CONSTANT 100.
86
87      - If an argument has an UNDEFINED value, then it does not affect
88	the outcome of the meet operation.  If a variable V_i has an
89	UNDEFINED value, it means that either its defining statement
90	hasn't been visited yet or V_i has no defining statement, in
91	which case the original symbol 'V' is being used
92	uninitialized.  Since 'V' is a local variable, the compiler
93	may assume any initial value for it.
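
	For instance, given

			a_3 = PHI (a_2(D), 5)

	where a_2(D) is the default definition of an uninitialized local
	variable, the UNDEFINED argument can be ignored and a_3 may
	optimistically be considered CONSTANT 5.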
94
95
96   After propagation, every variable V_i that ends up with a lattice
97   value of CONSTANT will have the associated constant value in the
98   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
99   final substitution and folding.
100
101   This algorithm uses wide-ints at the max precision of the target.
102   This means that, with one uninteresting exception, variables with
103   UNSIGNED types never go to VARYING because the bits above the
104   precision of the type of the variable are always zero.  The
105   uninteresting case is a variable of UNSIGNED type that has the
106   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
108   never be extended.
109
110   References:
111
112     Constant propagation with conditional branches,
113     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
114
115     Building an Optimizing Compiler,
116     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
117
118     Advanced Compiler Design and Implementation,
119     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
120
121#include "config.h"
122#include "system.h"
123#include "coretypes.h"
124#include "tm.h"
125#include "hash-set.h"
126#include "machmode.h"
127#include "vec.h"
128#include "double-int.h"
129#include "input.h"
130#include "alias.h"
131#include "symtab.h"
132#include "wide-int.h"
133#include "inchash.h"
134#include "real.h"
135#include "tree.h"
136#include "fold-const.h"
137#include "stor-layout.h"
138#include "flags.h"
139#include "tm_p.h"
140#include "predict.h"
141#include "hard-reg-set.h"
142#include "input.h"
143#include "function.h"
144#include "dominance.h"
145#include "cfg.h"
146#include "basic-block.h"
147#include "gimple-pretty-print.h"
148#include "hash-table.h"
149#include "tree-ssa-alias.h"
150#include "internal-fn.h"
151#include "gimple-fold.h"
152#include "tree-eh.h"
153#include "gimple-expr.h"
154#include "is-a.h"
155#include "gimple.h"
156#include "gimplify.h"
157#include "gimple-iterator.h"
158#include "gimple-ssa.h"
159#include "tree-cfg.h"
160#include "tree-phinodes.h"
161#include "ssa-iterators.h"
162#include "stringpool.h"
163#include "tree-ssanames.h"
164#include "tree-pass.h"
165#include "tree-ssa-propagate.h"
166#include "value-prof.h"
167#include "langhooks.h"
168#include "target.h"
169#include "diagnostic-core.h"
170#include "dbgcnt.h"
171#include "params.h"
172#include "wide-int-print.h"
173#include "builtins.h"
174#include "tree-chkp.h"
175
176
177/* Possible lattice values.  */
178typedef enum
179{
180  UNINITIALIZED,
181  UNDEFINED,
182  CONSTANT,
183  VARYING
184} ccp_lattice_t;
185
186struct ccp_prop_value_t {
187    /* Lattice value.  */
188    ccp_lattice_t lattice_val;
189
190    /* Propagated value.  */
191    tree value;
192
193    /* Mask that applies to the propagated value during CCP.  For X
194       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
195       zero bits in the mask cover constant values.  The ones mean no
196       information.  */
197    widest_int mask;
198};
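
/* As an illustration of the VALUE/MASK encoding above: a CONSTANT
   lattice value with VALUE 4 and MASK 3 states that every bit except
   the two least significant ones is known to match 4, i.e. the
   runtime value is one of 4, 5, 6 or 7.  A mask of zero means the
   value is fully known; a mask of all ones carries no information.  */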
199
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
206static ccp_prop_value_t *const_val;
207static unsigned n_const_val;
208
209static void canonicalize_value (ccp_prop_value_t *);
210static bool ccp_fold_stmt (gimple_stmt_iterator *);
211
212/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */
213
214static void
215dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
216{
217  switch (val.lattice_val)
218    {
219    case UNINITIALIZED:
220      fprintf (outf, "%sUNINITIALIZED", prefix);
221      break;
222    case UNDEFINED:
223      fprintf (outf, "%sUNDEFINED", prefix);
224      break;
225    case VARYING:
226      fprintf (outf, "%sVARYING", prefix);
227      break;
228    case CONSTANT:
229      if (TREE_CODE (val.value) != INTEGER_CST
230	  || val.mask == 0)
231	{
232	  fprintf (outf, "%sCONSTANT ", prefix);
233	  print_generic_expr (outf, val.value, dump_flags);
234	}
235      else
236	{
237	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
238					     val.mask);
239	  fprintf (outf, "%sCONSTANT ", prefix);
240	  print_hex (cval, outf);
241	  fprintf (outf, " (");
242	  print_hex (val.mask, outf);
243	  fprintf (outf, ")");
244	}
245      break;
246    default:
247      gcc_unreachable ();
248    }
249}
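
/* For example, a partially constant value appears in dump files as
   "CONSTANT 0x4 (0x3)": the first number shows the known bits (with
   unknown bit positions printed as zero), the number in parentheses
   is the mask of unknown bits.  */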
250
251
252/* Print lattice value VAL to stderr.  */
253
254void debug_lattice_value (ccp_prop_value_t val);
255
256DEBUG_FUNCTION void
257debug_lattice_value (ccp_prop_value_t val)
258{
259  dump_lattice_value (stderr, "", val);
260  fprintf (stderr, "\n");
261}
262
263/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */
264
265static widest_int
266extend_mask (const wide_int &nonzero_bits)
267{
268  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
269	  | widest_int::from (nonzero_bits, UNSIGNED));
270}
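
/* For example (illustrative numbers only): for a 32-bit variable whose
   known nonzero bits are 0xff, the returned mask has bits 0-7 set
   (unknown), bits 8-31 clear (known) and all bits above the variable's
   precision set again.  Combined with a zero VALUE, as done in
   get_default_value below, this encodes that bits 8-31 of the variable
   are known to be zero.  */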
271
272/* Compute a default value for variable VAR and store it in the
273   CONST_VAL array.  The following rules are used to get default
274   values:
275
276   1- Global and static variables that are declared constant are
277      considered CONSTANT.
278
279   2- Any other value is considered UNDEFINED.  This is useful when
280      considering PHI nodes.  PHI arguments that are undefined do not
281      change the constant value of the PHI node, which allows for more
282      constants to be propagated.
283
284   3- Variables defined by statements other than assignments and PHI
285      nodes are considered VARYING.
286
287   4- Initial values of variables that are not GIMPLE registers are
288      considered VARYING.  */
289
290static ccp_prop_value_t
291get_default_value (tree var)
292{
293  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
294  gimple stmt;
295
296  stmt = SSA_NAME_DEF_STMT (var);
297
298  if (gimple_nop_p (stmt))
299    {
300      /* Variables defined by an empty statement are those used
301	 before being initialized.  If VAR is a local variable, we
302	 can assume initially that it is UNDEFINED, otherwise we must
303	 consider it VARYING.  */
304      if (!virtual_operand_p (var)
305	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
306	val.lattice_val = UNDEFINED;
307      else
308	{
309	  val.lattice_val = VARYING;
310	  val.mask = -1;
311	  if (flag_tree_bit_ccp)
312	    {
313	      wide_int nonzero_bits = get_nonzero_bits (var);
314	      if (nonzero_bits != -1)
315		{
316		  val.lattice_val = CONSTANT;
317		  val.value = build_zero_cst (TREE_TYPE (var));
318		  val.mask = extend_mask (nonzero_bits);
319		}
320	    }
321	}
322    }
323  else if (is_gimple_assign (stmt))
324    {
325      tree cst;
326      if (gimple_assign_single_p (stmt)
327	  && DECL_P (gimple_assign_rhs1 (stmt))
328	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
329	{
330	  val.lattice_val = CONSTANT;
331	  val.value = cst;
332	}
333      else
334	{
335	  /* Any other variable defined by an assignment is considered
336	     UNDEFINED.  */
337	  val.lattice_val = UNDEFINED;
338	}
339    }
340  else if ((is_gimple_call (stmt)
341	    && gimple_call_lhs (stmt) != NULL_TREE)
342	   || gimple_code (stmt) == GIMPLE_PHI)
343    {
344      /* A variable defined by a call or a PHI node is considered
345	 UNDEFINED.  */
346      val.lattice_val = UNDEFINED;
347    }
348  else
349    {
350      /* Otherwise, VAR will never take on a constant value.  */
351      val.lattice_val = VARYING;
352      val.mask = -1;
353    }
354
355  return val;
356}
357
358
359/* Get the constant value associated with variable VAR.  */
360
361static inline ccp_prop_value_t *
362get_value (tree var)
363{
364  ccp_prop_value_t *val;
365
366  if (const_val == NULL
367      || SSA_NAME_VERSION (var) >= n_const_val)
368    return NULL;
369
370  val = &const_val[SSA_NAME_VERSION (var)];
371  if (val->lattice_val == UNINITIALIZED)
372    *val = get_default_value (var);
373
374  canonicalize_value (val);
375
376  return val;
377}
378
379/* Return the constant tree value associated with VAR.  */
380
381static inline tree
382get_constant_value (tree var)
383{
384  ccp_prop_value_t *val;
385  if (TREE_CODE (var) != SSA_NAME)
386    {
387      if (is_gimple_min_invariant (var))
388        return var;
389      return NULL_TREE;
390    }
391  val = get_value (var);
392  if (val
393      && val->lattice_val == CONSTANT
394      && (TREE_CODE (val->value) != INTEGER_CST
395	  || val->mask == 0))
396    return val->value;
397  return NULL_TREE;
398}
399
400/* Sets the value associated with VAR to VARYING.  */
401
402static inline void
403set_value_varying (tree var)
404{
405  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
406
407  val->lattice_val = VARYING;
408  val->value = NULL_TREE;
409  val->mask = -1;
410}
411
412/* For integer constants, make sure to drop TREE_OVERFLOW.  */
413
414static void
415canonicalize_value (ccp_prop_value_t *val)
416{
417  if (val->lattice_val != CONSTANT)
418    return;
419
420  if (TREE_OVERFLOW_P (val->value))
421    val->value = drop_tree_overflow (val->value);
422}
423
424/* Return whether the lattice transition is valid.  */
425
426static bool
427valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
428{
429  /* Lattice transitions must always be monotonically increasing in
430     value.  */
431  if (old_val.lattice_val < new_val.lattice_val)
432    return true;
433
434  if (old_val.lattice_val != new_val.lattice_val)
435    return false;
436
437  if (!old_val.value && !new_val.value)
438    return true;
439
440  /* Now both lattice values are CONSTANT.  */
441
442  /* Allow transitioning from PHI <&x, not executable> == &x
443     to PHI <&x, &y> == common alignment.  */
444  if (TREE_CODE (old_val.value) != INTEGER_CST
445      && TREE_CODE (new_val.value) == INTEGER_CST)
446    return true;
447
448  /* Bit-lattices have to agree in the still valid bits.  */
449  if (TREE_CODE (old_val.value) == INTEGER_CST
450      && TREE_CODE (new_val.value) == INTEGER_CST)
451    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
452	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
453
454  /* Otherwise constant values have to agree.  */
455  if (operand_equal_p (old_val.value, new_val.value, 0))
456    return true;
457
458  /* At least the kinds and types should agree now.  */
459  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
460      || !types_compatible_p (TREE_TYPE (old_val.value),
461			      TREE_TYPE (new_val.value)))
462    return false;
463
464  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
465     to non-NaN.  */
466  tree type = TREE_TYPE (new_val.value);
467  if (SCALAR_FLOAT_TYPE_P (type)
468      && !HONOR_NANS (type))
469    {
470      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
471	return true;
472    }
473  else if (VECTOR_FLOAT_TYPE_P (type)
474	   && !HONOR_NANS (type))
475    {
476      for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
477	if (!REAL_VALUE_ISNAN
478	       (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
479	    && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
480				 VECTOR_CST_ELT (new_val.value, i), 0))
481	  return false;
482      return true;
483    }
484  else if (COMPLEX_FLOAT_TYPE_P (type)
485	   && !HONOR_NANS (type))
486    {
487      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
488	  && !operand_equal_p (TREE_REALPART (old_val.value),
489			       TREE_REALPART (new_val.value), 0))
490	return false;
491      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
492	  && !operand_equal_p (TREE_IMAGPART (old_val.value),
493			       TREE_IMAGPART (new_val.value), 0))
494	return false;
495      return true;
496    }
497  return false;
498}
499
500/* Set the value for variable VAR to NEW_VAL.  Return true if the new
501   value is different from VAR's previous value.  */
502
503static bool
504set_lattice_value (tree var, ccp_prop_value_t new_val)
505{
506  /* We can deal with old UNINITIALIZED values just fine here.  */
507  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
508
509  canonicalize_value (&new_val);
510
511  /* We have to be careful to not go up the bitwise lattice
512     represented by the mask.
513     ???  This doesn't seem to be the best place to enforce this.  */
514  if (new_val.lattice_val == CONSTANT
515      && old_val->lattice_val == CONSTANT
516      && TREE_CODE (new_val.value) == INTEGER_CST
517      && TREE_CODE (old_val->value) == INTEGER_CST)
518    {
519      widest_int diff = (wi::to_widest (new_val.value)
520			 ^ wi::to_widest (old_val->value));
521      new_val.mask = new_val.mask | old_val->mask | diff;
522    }
523
524  gcc_checking_assert (valid_lattice_transition (*old_val, new_val));
525
526  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
527     caller that this was a non-transition.  */
528  if (old_val->lattice_val != new_val.lattice_val
529      || (new_val.lattice_val == CONSTANT
530	  && TREE_CODE (new_val.value) == INTEGER_CST
531	  && (TREE_CODE (old_val->value) != INTEGER_CST
532	      || new_val.mask != old_val->mask)))
533    {
      /* ???  We would like to delay creation of INTEGER_CSTs from
	 partially constant values here.  */
536
537      if (dump_file && (dump_flags & TDF_DETAILS))
538	{
539	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
540	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
541	}
542
543      *old_val = new_val;
544
545      gcc_assert (new_val.lattice_val != UNINITIALIZED);
546      return true;
547    }
548
549  return false;
550}
551
552static ccp_prop_value_t get_value_for_expr (tree, bool);
553static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
554static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
555			       tree, const widest_int &, const widest_int &,
556			       tree, const widest_int &, const widest_int &);
557
558/* Return a widest_int that can be used for bitwise simplifications
559   from VAL.  */
560
561static widest_int
562value_to_wide_int (ccp_prop_value_t val)
563{
564  if (val.value
565      && TREE_CODE (val.value) == INTEGER_CST)
566    return wi::to_widest (val.value);
567
568  return 0;
569}
570
571/* Return the value for the address expression EXPR based on alignment
572   information.  */
573
574static ccp_prop_value_t
575get_value_from_alignment (tree expr)
576{
577  tree type = TREE_TYPE (expr);
578  ccp_prop_value_t val;
579  unsigned HOST_WIDE_INT bitpos;
580  unsigned int align;
581
582  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
583
584  get_pointer_alignment_1 (expr, &align, &bitpos);
585  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
586	      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
587	      : -1).and_not (align / BITS_PER_UNIT - 1);
588  val.lattice_val = val.mask == -1 ? VARYING : CONSTANT;
589  if (val.lattice_val == CONSTANT)
590    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
591  else
592    val.value = NULL_TREE;
593
594  return val;
595}
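
/* For example (an illustrative sketch): for an ADDR_EXPR of a
   16-byte-aligned object at byte offset 4 from that alignment, and a
   64-bit pointer type, the result is CONSTANT with VALUE 4 and a mask
   covering bits 4-63, i.e. only the low four bits of the address are
   known (to be 0b0100).  */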
596
597/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
598   return constant bits extracted from alignment information for
599   invariant addresses.  */
600
601static ccp_prop_value_t
602get_value_for_expr (tree expr, bool for_bits_p)
603{
604  ccp_prop_value_t val;
605
606  if (TREE_CODE (expr) == SSA_NAME)
607    {
608      val = *get_value (expr);
609      if (for_bits_p
610	  && val.lattice_val == CONSTANT
611	  && TREE_CODE (val.value) == ADDR_EXPR)
612	val = get_value_from_alignment (val.value);
613    }
614  else if (is_gimple_min_invariant (expr)
615	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
616    {
617      val.lattice_val = CONSTANT;
618      val.value = expr;
619      val.mask = 0;
620      canonicalize_value (&val);
621    }
622  else if (TREE_CODE (expr) == ADDR_EXPR)
623    val = get_value_from_alignment (expr);
624  else
625    {
626      val.lattice_val = VARYING;
627      val.mask = -1;
628      val.value = NULL_TREE;
629    }
630  return val;
631}
632
633/* Return the likely CCP lattice value for STMT.
634
635   If STMT has no operands, then return CONSTANT.
636
637   Else if undefinedness of operands of STMT cause its value to be
638   undefined, then return UNDEFINED.
639
640   Else if any operands of STMT are constants, then return CONSTANT.
641
642   Else return VARYING.  */
643
644static ccp_lattice_t
645likely_value (gimple stmt)
646{
647  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
648  tree use;
649  ssa_op_iter iter;
650  unsigned i;
651
652  enum gimple_code code = gimple_code (stmt);
653
654  /* This function appears to be called only for assignments, calls,
655     conditionals, and switches, due to the logic in visit_stmt.  */
656  gcc_assert (code == GIMPLE_ASSIGN
657              || code == GIMPLE_CALL
658              || code == GIMPLE_COND
659              || code == GIMPLE_SWITCH);
660
661  /* If the statement has volatile operands, it won't fold to a
662     constant value.  */
663  if (gimple_has_volatile_ops (stmt))
664    return VARYING;
665
666  /* Arrive here for more complex cases.  */
667  has_constant_operand = false;
668  has_undefined_operand = false;
669  all_undefined_operands = true;
670  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
671    {
672      ccp_prop_value_t *val = get_value (use);
673
674      if (val->lattice_val == UNDEFINED)
675	has_undefined_operand = true;
676      else
677	all_undefined_operands = false;
678
679      if (val->lattice_val == CONSTANT)
680	has_constant_operand = true;
681    }
682
  /* There may be constants in regular rhs operands.  For calls we
     have to skip the lhs, fndecl and static chain; for other
     statements only the lhs.  */
686  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
687       i < gimple_num_ops (stmt); ++i)
688    {
689      tree op = gimple_op (stmt, i);
690      if (!op || TREE_CODE (op) == SSA_NAME)
691	continue;
692      if (is_gimple_min_invariant (op))
693	has_constant_operand = true;
694    }
695
696  if (has_constant_operand)
697    all_undefined_operands = false;
698
699  if (has_undefined_operand
700      && code == GIMPLE_CALL
701      && gimple_call_internal_p (stmt))
702    switch (gimple_call_internal_fn (stmt))
703      {
	/* These 3 builtins use the first argument just as a magic
	   way to find out a decl uid.  */
706      case IFN_GOMP_SIMD_LANE:
707      case IFN_GOMP_SIMD_VF:
708      case IFN_GOMP_SIMD_LAST_LANE:
709	has_undefined_operand = false;
710	break;
711      default:
712	break;
713      }
714
715  /* If the operation combines operands like COMPLEX_EXPR make sure to
716     not mark the result UNDEFINED if only one part of the result is
717     undefined.  */
718  if (has_undefined_operand && all_undefined_operands)
719    return UNDEFINED;
720  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
721    {
722      switch (gimple_assign_rhs_code (stmt))
723	{
724	/* Unary operators are handled with all_undefined_operands.  */
725	case PLUS_EXPR:
726	case MINUS_EXPR:
727	case POINTER_PLUS_EXPR:
728	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
729	     Not bitwise operators, one VARYING operand may specify the
730	     result completely.  Not logical operators for the same reason.
731	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
732	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
733	     the undefined operand may be promoted.  */
734	  return UNDEFINED;
735
736	case ADDR_EXPR:
737	  /* If any part of an address is UNDEFINED, like the index
738	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
739	  return UNDEFINED;
740
741	default:
742	  ;
743	}
744    }
  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
748  if (has_undefined_operand)
749    return CONSTANT;
750
751  /* We do not consider virtual operands here -- load from read-only
752     memory may have only VARYING virtual operands, but still be
753     constant.  */
754  if (has_constant_operand
755      || gimple_references_memory_p (stmt))
756    return CONSTANT;
757
758  return VARYING;
759}
760
761/* Returns true if STMT cannot be constant.  */
762
763static bool
764surely_varying_stmt_p (gimple stmt)
765{
766  /* If the statement has operands that we cannot handle, it cannot be
767     constant.  */
768  if (gimple_has_volatile_ops (stmt))
769    return true;
770
  /* If it is a call that does not return a value, or a direct call
     to a function that is neither a builtin nor declared with the
     assume_aligned/alloc_align attribute, it is varying.  */
774  if (is_gimple_call (stmt))
775    {
776      tree fndecl, fntype = gimple_call_fntype (stmt);
777      if (!gimple_call_lhs (stmt)
778	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
779	      && !DECL_BUILT_IN (fndecl)
780	      && !lookup_attribute ("assume_aligned",
781				    TYPE_ATTRIBUTES (fntype))
782	      && !lookup_attribute ("alloc_align",
783				    TYPE_ATTRIBUTES (fntype))))
784	return true;
785    }
786
787  /* Any other store operation is not interesting.  */
788  else if (gimple_vdef (stmt))
789    return true;
790
791  /* Anything other than assignments and conditional jumps are not
792     interesting for CCP.  */
793  if (gimple_code (stmt) != GIMPLE_ASSIGN
794      && gimple_code (stmt) != GIMPLE_COND
795      && gimple_code (stmt) != GIMPLE_SWITCH
796      && gimple_code (stmt) != GIMPLE_CALL)
797    return true;
798
799  return false;
800}
801
802/* Initialize local data structures for CCP.  */
803
804static void
805ccp_initialize (void)
806{
807  basic_block bb;
808
809  n_const_val = num_ssa_names;
810  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
811
812  /* Initialize simulation flags for PHI nodes and statements.  */
813  FOR_EACH_BB_FN (bb, cfun)
814    {
815      gimple_stmt_iterator i;
816
817      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
818        {
819	  gimple stmt = gsi_stmt (i);
820	  bool is_varying;
821
	  /* If the statement is a control insn, we want to make sure
	     it is simulated at least once.  Failure to do so means
	     that the outgoing edges will never get added.  */
825	  if (stmt_ends_bb_p (stmt))
826	    is_varying = false;
827	  else
828	    is_varying = surely_varying_stmt_p (stmt);
829
830	  if (is_varying)
831	    {
832	      tree def;
833	      ssa_op_iter iter;
834
835	      /* If the statement will not produce a constant, mark
836		 all its outputs VARYING.  */
837	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
838		set_value_varying (def);
839	    }
840          prop_set_simulate_again (stmt, !is_varying);
841	}
842    }
843
  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands.  */
847  FOR_EACH_BB_FN (bb, cfun)
848    {
849      gphi_iterator i;
850
851      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
852        {
853          gphi *phi = i.phi ();
854
855	  if (virtual_operand_p (gimple_phi_result (phi)))
856            prop_set_simulate_again (phi, false);
857	  else
858            prop_set_simulate_again (phi, true);
859	}
860    }
861}
862
/* Debug count support.  Reset the lattice values of SSA names to
   VARYING when the total number of SSA names analyzed exceeds the
   specified debug count.  */
866
867static void
868do_dbg_cnt (void)
869{
870  unsigned i;
871  for (i = 0; i < num_ssa_names; i++)
872    {
873      if (!dbg_cnt (ccp))
874        {
875          const_val[i].lattice_val = VARYING;
876	  const_val[i].mask = -1;
877          const_val[i].value = NULL_TREE;
878        }
879    }
880}
881
882
883/* Do final substitution of propagated values, cleanup the flowgraph and
884   free allocated storage.
885
886   Return TRUE when something was optimized.  */
887
888static bool
889ccp_finalize (void)
890{
891  bool something_changed;
892  unsigned i;
893
894  do_dbg_cnt ();
895
896  /* Derive alignment and misalignment information from partially
897     constant pointers in the lattice or nonzero bits from partially
898     constant integers.  */
899  for (i = 1; i < num_ssa_names; ++i)
900    {
901      tree name = ssa_name (i);
902      ccp_prop_value_t *val;
903      unsigned int tem, align;
904
905      if (!name
906	  || (!POINTER_TYPE_P (TREE_TYPE (name))
907	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
908		  /* Don't record nonzero bits before IPA to avoid
909		     using too much memory.  */
910		  || first_pass_instance)))
911	continue;
912
913      val = get_value (name);
914      if (val->lattice_val != CONSTANT
915	  || TREE_CODE (val->value) != INTEGER_CST)
916	continue;
917
918      if (POINTER_TYPE_P (TREE_TYPE (name)))
919	{
920	  /* Trailing mask bits specify the alignment, trailing value
921	     bits the misalignment.  */
922	  tem = val->mask.to_uhwi ();
923	  align = (tem & -tem);
924	  if (align > 1)
925	    set_ptr_info_alignment (get_ptr_info (name), align,
926				    (TREE_INT_CST_LOW (val->value)
927				     & (align - 1)));
928	}
929      else
930	{
931	  unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
932	  wide_int nonzero_bits = wide_int::from (val->mask, precision,
933						  UNSIGNED) | val->value;
934	  nonzero_bits &= get_nonzero_bits (name);
935	  set_nonzero_bits (name, nonzero_bits);
936	}
937    }
938
939  /* Perform substitutions based on the known constant values.  */
940  something_changed = substitute_and_fold (get_constant_value,
941					   ccp_fold_stmt, true);
942
943  free (const_val);
944  const_val = NULL;
  return something_changed;
946}
947
948
949/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
950   in VAL1.
951
952   		any  M UNDEFINED   = any
953		any  M VARYING     = VARYING
954		Ci   M Cj	   = Ci		if (i == j)
955		Ci   M Cj	   = VARYING	if (i != j)
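
   For INTEGER_CSTs the meet additionally works bitwise.  For example,
   meeting CONSTANT 12 with CONSTANT 4 (both with a zero mask) yields a
   CONSTANT whose low three bits are known to be 100 in binary and
   whose bit 3 is unknown (mask 8), instead of dropping immediately to
   VARYING.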
956   */
957
958static void
959ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
960{
961  if (val1->lattice_val == UNDEFINED)
962    {
963      /* UNDEFINED M any = any   */
964      *val1 = *val2;
965    }
966  else if (val2->lattice_val == UNDEFINED)
967    {
968      /* any M UNDEFINED = any
969         Nothing to do.  VAL1 already contains the value we want.  */
970      ;
971    }
972  else if (val1->lattice_val == VARYING
973           || val2->lattice_val == VARYING)
974    {
975      /* any M VARYING = VARYING.  */
976      val1->lattice_val = VARYING;
977      val1->mask = -1;
978      val1->value = NULL_TREE;
979    }
980  else if (val1->lattice_val == CONSTANT
981	   && val2->lattice_val == CONSTANT
982	   && TREE_CODE (val1->value) == INTEGER_CST
983	   && TREE_CODE (val2->value) == INTEGER_CST)
984    {
985      /* Ci M Cj = Ci		if (i == j)
986	 Ci M Cj = VARYING	if (i != j)
987
988         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
989	 drop to varying.  */
990      val1->mask = (val1->mask | val2->mask
991		    | (wi::to_widest (val1->value)
992		       ^ wi::to_widest (val2->value)));
993      if (val1->mask == -1)
994	{
995	  val1->lattice_val = VARYING;
996	  val1->value = NULL_TREE;
997	}
998    }
999  else if (val1->lattice_val == CONSTANT
1000	   && val2->lattice_val == CONSTANT
1001	   && simple_cst_equal (val1->value, val2->value) == 1)
1002    {
1003      /* Ci M Cj = Ci		if (i == j)
1004	 Ci M Cj = VARYING	if (i != j)
1005
1006         VAL1 already contains the value we want for equivalent values.  */
1007    }
1008  else if (val1->lattice_val == CONSTANT
1009	   && val2->lattice_val == CONSTANT
1010	   && (TREE_CODE (val1->value) == ADDR_EXPR
1011	       || TREE_CODE (val2->value) == ADDR_EXPR))
1012    {
      /* When unequal addresses are involved, try meeting for
	 alignment.  */
1015      ccp_prop_value_t tem = *val2;
1016      if (TREE_CODE (val1->value) == ADDR_EXPR)
1017	*val1 = get_value_for_expr (val1->value, true);
1018      if (TREE_CODE (val2->value) == ADDR_EXPR)
1019	tem = get_value_for_expr (val2->value, true);
1020      ccp_lattice_meet (val1, &tem);
1021    }
1022  else
1023    {
1024      /* Any other combination is VARYING.  */
1025      val1->lattice_val = VARYING;
1026      val1->mask = -1;
1027      val1->value = NULL_TREE;
1028    }
1029}
1030
1031
1032/* Loop through the PHI_NODE's parameters for BLOCK and compare their
1033   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
1035   of the PHI node that are incoming via executable edges.  */
1036
1037static enum ssa_prop_result
1038ccp_visit_phi_node (gphi *phi)
1039{
1040  unsigned i;
1041  ccp_prop_value_t *old_val, new_val;
1042
1043  if (dump_file && (dump_flags & TDF_DETAILS))
1044    {
1045      fprintf (dump_file, "\nVisiting PHI node: ");
1046      print_gimple_stmt (dump_file, phi, 0, dump_flags);
1047    }
1048
1049  old_val = get_value (gimple_phi_result (phi));
1050  switch (old_val->lattice_val)
1051    {
1052    case VARYING:
1053      return SSA_PROP_VARYING;
1054
1055    case CONSTANT:
1056      new_val = *old_val;
1057      break;
1058
1059    case UNDEFINED:
1060      new_val.lattice_val = UNDEFINED;
1061      new_val.value = NULL_TREE;
1062      break;
1063
1064    default:
1065      gcc_unreachable ();
1066    }
1067
1068  for (i = 0; i < gimple_phi_num_args (phi); i++)
1069    {
1070      /* Compute the meet operator over all the PHI arguments flowing
1071	 through executable edges.  */
1072      edge e = gimple_phi_arg_edge (phi, i);
1073
1074      if (dump_file && (dump_flags & TDF_DETAILS))
1075	{
1076	  fprintf (dump_file,
1077	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
1078	      i, e->src->index, e->dest->index,
1079	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1080	}
1081
      /* If the incoming edge is executable, compute the meet operator for
1083	 the existing value of the PHI node and the current PHI argument.  */
1084      if (e->flags & EDGE_EXECUTABLE)
1085	{
1086	  tree arg = gimple_phi_arg (phi, i)->def;
1087	  ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
1088
1089	  ccp_lattice_meet (&new_val, &arg_val);
1090
1091	  if (dump_file && (dump_flags & TDF_DETAILS))
1092	    {
1093	      fprintf (dump_file, "\t");
1094	      print_generic_expr (dump_file, arg, dump_flags);
1095	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
1096	      fprintf (dump_file, "\n");
1097	    }
1098
1099	  if (new_val.lattice_val == VARYING)
1100	    break;
1101	}
1102    }
1103
1104  if (dump_file && (dump_flags & TDF_DETAILS))
1105    {
1106      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
1107      fprintf (dump_file, "\n\n");
1108    }
1109
1110  /* Make the transition to the new value.  */
1111  if (set_lattice_value (gimple_phi_result (phi), new_val))
1112    {
1113      if (new_val.lattice_val == VARYING)
1114	return SSA_PROP_VARYING;
1115      else
1116	return SSA_PROP_INTERESTING;
1117    }
1118  else
1119    return SSA_PROP_NOT_INTERESTING;
1120}
1121
1122/* Return the constant value for OP or OP otherwise.  */
1123
1124static tree
1125valueize_op (tree op)
1126{
1127  if (TREE_CODE (op) == SSA_NAME)
1128    {
1129      tree tem = get_constant_value (op);
1130      if (tem)
1131	return tem;
1132    }
1133  return op;
1134}
1135
1136/* Return the constant value for OP, but signal to not follow SSA
1137   edges if the definition may be simulated again.  */
1138
1139static tree
1140valueize_op_1 (tree op)
1141{
1142  if (TREE_CODE (op) == SSA_NAME)
1143    {
1144      /* If the definition may be simulated again we cannot follow
1145         this SSA edge as the SSA propagator does not necessarily
1146	 re-visit the use.  */
1147      gimple def_stmt = SSA_NAME_DEF_STMT (op);
1148      if (!gimple_nop_p (def_stmt)
1149	  && prop_simulate_again_p (def_stmt))
1150	return NULL_TREE;
1151      tree tem = get_constant_value (op);
1152      if (tem)
1153	return tem;
1154    }
1155  return op;
1156}
1157
1158/* CCP specific front-end to the non-destructive constant folding
1159   routines.
1160
1161   Attempt to simplify the RHS of STMT knowing that one or more
1162   operands are constants.
1163
1164   If simplification is possible, return the simplified RHS,
1165   otherwise return the original RHS or NULL_TREE.  */
1166
1167static tree
1168ccp_fold (gimple stmt)
1169{
1170  location_t loc = gimple_location (stmt);
1171  switch (gimple_code (stmt))
1172    {
1173    case GIMPLE_COND:
1174      {
1175        /* Handle comparison operators that can appear in GIMPLE form.  */
1176        tree op0 = valueize_op (gimple_cond_lhs (stmt));
1177        tree op1 = valueize_op (gimple_cond_rhs (stmt));
1178        enum tree_code code = gimple_cond_code (stmt);
1179        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1180      }
1181
1182    case GIMPLE_SWITCH:
1183      {
1184	/* Return the constant switch index.  */
1185        return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
1186      }
1187
1188    case GIMPLE_ASSIGN:
1189    case GIMPLE_CALL:
1190      return gimple_fold_stmt_to_constant_1 (stmt,
1191					     valueize_op, valueize_op_1);
1192
1193    default:
1194      gcc_unreachable ();
1195    }
1196}
1197
1198/* Apply the operation CODE in type TYPE to the value, mask pair
1199   RVAL and RMASK representing a value of type RTYPE and set
1200   the value, mask pair *VAL and *MASK to the result.  */
1201
1202static void
1203bit_value_unop_1 (enum tree_code code, tree type,
1204		  widest_int *val, widest_int *mask,
1205		  tree rtype, const widest_int &rval, const widest_int &rmask)
1206{
1207  switch (code)
1208    {
1209    case BIT_NOT_EXPR:
1210      *mask = rmask;
1211      *val = ~rval;
1212      break;
1213
1214    case NEGATE_EXPR:
1215      {
1216	widest_int temv, temm;
1217	/* Return ~rval + 1.  */
1218	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1219	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1220			   type, temv, temm, type, 1, 0);
1221	break;
1222      }
1223
1224    CASE_CONVERT:
1225      {
1226	signop sgn;
1227
1228	/* First extend mask and value according to the original type.  */
1229	sgn = TYPE_SIGN (rtype);
1230	*mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
1231	*val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);
1232
1233	/* Then extend mask and value according to the target type.  */
1234	sgn = TYPE_SIGN (type);
1235	*mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
1236	*val = wi::ext (*val, TYPE_PRECISION (type), sgn);
1237	break;
1238      }
1239
1240    default:
1241      *mask = -1;
1242      break;
1243    }
1244}
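
/* For example (a worked sketch of the CASE_CONVERT handling above):
   widening a signed char whose sign bit is unknown (mask 0x80) to a
   32-bit int first sign-extends the mask to ...ffffff80, so all bits
   from bit 7 upwards become unknown in the wider type, while the low
   seven known bits are preserved.  */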
1245
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */
1249
1250static void
1251bit_value_binop_1 (enum tree_code code, tree type,
1252		   widest_int *val, widest_int *mask,
1253		   tree r1type, const widest_int &r1val,
1254		   const widest_int &r1mask, tree r2type,
1255		   const widest_int &r2val, const widest_int &r2mask)
1256{
1257  signop sgn = TYPE_SIGN (type);
1258  int width = TYPE_PRECISION (type);
1259  bool swap_p = false;
1260
  /* Assume we'll get a constant result.  Start from an all-unknown
     (varying) mask and fall back to varying in the end if necessary.  */
1263  *mask = -1;
1264
1265  switch (code)
1266    {
1267    case BIT_AND_EXPR:
      /* The mask is constant where there is a bit known not to be
	 set, (m1 | m2) & ((v1 | m1) & (v2 | m2)).  */
1270      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1271      *val = r1val & r2val;
1272      break;
1273
1274    case BIT_IOR_EXPR:
      /* The mask is constant where there is a bit known to be
	 set, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
1277      *mask = (r1mask | r2mask)
1278	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1279      *val = r1val | r2val;
1280      break;
1281
1282    case BIT_XOR_EXPR:
1283      /* m1 | m2  */
1284      *mask = r1mask | r2mask;
1285      *val = r1val ^ r2val;
1286      break;
1287
1288    case LROTATE_EXPR:
1289    case RROTATE_EXPR:
1290      if (r2mask == 0)
1291	{
1292	  widest_int shift = r2val;
1293	  if (shift == 0)
1294	    {
1295	      *mask = r1mask;
1296	      *val = r1val;
1297	    }
1298	  else
1299	    {
1300	      if (wi::neg_p (shift))
1301		{
1302		  shift = -shift;
1303		  if (code == RROTATE_EXPR)
1304		    code = LROTATE_EXPR;
1305		  else
1306		    code = RROTATE_EXPR;
1307		}
1308	      if (code == RROTATE_EXPR)
1309		{
1310		  *mask = wi::rrotate (r1mask, shift, width);
1311		  *val = wi::rrotate (r1val, shift, width);
1312		}
1313	      else
1314		{
1315		  *mask = wi::lrotate (r1mask, shift, width);
1316		  *val = wi::lrotate (r1val, shift, width);
1317		}
1318	    }
1319	}
1320      break;
1321
1322    case LSHIFT_EXPR:
1323    case RSHIFT_EXPR:
      /* ???  We can handle a partially known shift count if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
1327      if (r2mask == 0)
1328	{
1329	  widest_int shift = r2val;
1330	  if (shift == 0)
1331	    {
1332	      *mask = r1mask;
1333	      *val = r1val;
1334	    }
1335	  else
1336	    {
1337	      if (wi::neg_p (shift))
1338		{
1339		  shift = -shift;
1340		  if (code == RSHIFT_EXPR)
1341		    code = LSHIFT_EXPR;
1342		  else
1343		    code = RSHIFT_EXPR;
1344		}
1345	      if (code == RSHIFT_EXPR)
1346		{
1347		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1348		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
1349		}
1350	      else
1351		{
1352		  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
1353		  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
1354		}
1355	    }
1356	}
1357      break;
1358
1359    case PLUS_EXPR:
1360    case POINTER_PLUS_EXPR:
1361      {
1362	/* Do the addition with unknown bits set to zero, to give carry-ins of
1363	   zero wherever possible.  */
1364	widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
1365	lo = wi::ext (lo, width, sgn);
1366	/* Do the addition with unknown bits set to one, to give carry-ins of
1367	   one wherever possible.  */
1368	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
1369	hi = wi::ext (hi, width, sgn);
1370	/* Each bit in the result is known if (a) the corresponding bits in
1371	   both inputs are known, and (b) the carry-in to that bit position
1372	   is known.  We can check condition (b) by seeing if we got the same
1373	   result with minimised carries as with maximised carries.  */
1374	*mask = r1mask | r2mask | (lo ^ hi);
1375	*mask = wi::ext (*mask, width, sgn);
1376	/* It shouldn't matter whether we choose lo or hi here.  */
1377	*val = lo;
1378	break;
1379      }
1380
1381    case MINUS_EXPR:
1382      {
1383	widest_int temv, temm;
1384	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1385			  r2type, r2val, r2mask);
1386	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1387			   r1type, r1val, r1mask,
1388			   r2type, temv, temm);
1389	break;
1390      }
1391
1392    case MULT_EXPR:
1393      {
	/* Just track trailing zeros in both operands; the result has
	   at least as many trailing zeros as both counts combined.  */
1396	int r1tz = wi::ctz (r1val | r1mask);
1397	int r2tz = wi::ctz (r2val | r2mask);
1398	if (r1tz + r2tz >= width)
1399	  {
1400	    *mask = 0;
1401	    *val = 0;
1402	  }
1403	else if (r1tz + r2tz > 0)
1404	  {
1405	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
1406			     width, sgn);
1407	    *val = 0;
1408	  }
1409	break;
1410      }
1411
1412    case EQ_EXPR:
1413    case NE_EXPR:
1414      {
1415	widest_int m = r1mask | r2mask;
1416	if (r1val.and_not (m) != r2val.and_not (m))
1417	  {
1418	    *mask = 0;
1419	    *val = ((code == EQ_EXPR) ? 0 : 1);
1420	  }
1421	else
1422	  {
1423	    /* We know the result of a comparison is always one or zero.  */
1424	    *mask = 1;
1425	    *val = 0;
1426	  }
1427	break;
1428      }
1429
1430    case GE_EXPR:
1431    case GT_EXPR:
1432      swap_p = true;
1433      code = swap_tree_comparison (code);
1434      /* Fall through.  */
1435    case LT_EXPR:
1436    case LE_EXPR:
1437      {
1438	int minmax, maxmin;
1439
1440	const widest_int &o1val = swap_p ? r2val : r1val;
1441	const widest_int &o1mask = swap_p ? r2mask : r1mask;
1442	const widest_int &o2val = swap_p ? r1val : r2val;
1443	const widest_int &o2mask = swap_p ? r1mask : r2mask;
1444
1445	/* If the most significant bits are not known we know nothing.  */
1446	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
1447	  break;
1448
1449	/* For comparisons the signedness is in the comparison operands.  */
1450	sgn = TYPE_SIGN (r1type);
1451
	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero or one.
	   Do a cross comparison of the max/min pairs.  */
1455	maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
1456	minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
1457	if (maxmin < 0)  /* o1 is less than o2.  */
1458	  {
1459	    *mask = 0;
1460	    *val = 1;
1461	  }
1462	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
1463	  {
1464	    *mask = 0;
1465	    *val = 0;
1466	  }
1467	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
1468	  {
1469	    /* This probably should never happen as we'd have
1470	       folded the thing during fully constant value folding.  */
1471	    *mask = 0;
1472	    *val = (code == LE_EXPR ? 1 : 0);
1473	  }
1474	else
1475	  {
1476	    /* We know the result of a comparison is always one or zero.  */
1477	    *mask = 1;
1478	    *val = 0;
1479	  }
1480	break;
1481      }
1482
1483    default:;
1484    }
1485}
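
/* A worked example of the PLUS_EXPR handling above (illustrative
   numbers only): adding a value known to be in { 4, 5, 6, 7 }
   (VAL 4, MASK 3) to the constant 8 gives lo = 4 + 8 = 12 and
   hi = 7 + 8 = 15, so *MASK = 3 | 0 | (12 ^ 15) = 3 and *VAL = 12,
   i.e. the result is known to be in { 12, 13, 14, 15 }.  */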
1486
1487/* Return the propagation value when applying the operation CODE to
1488   the value RHS yielding type TYPE.  */
1489
1490static ccp_prop_value_t
1491bit_value_unop (enum tree_code code, tree type, tree rhs)
1492{
1493  ccp_prop_value_t rval = get_value_for_expr (rhs, true);
1494  widest_int value, mask;
1495  ccp_prop_value_t val;
1496
1497  if (rval.lattice_val == UNDEFINED)
1498    return rval;
1499
1500  gcc_assert ((rval.lattice_val == CONSTANT
1501	       && TREE_CODE (rval.value) == INTEGER_CST)
1502	      || rval.mask == -1);
1503  bit_value_unop_1 (code, type, &value, &mask,
1504		    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
1505  if (mask != -1)
1506    {
1507      val.lattice_val = CONSTANT;
1508      val.mask = mask;
1509      /* ???  Delay building trees here.  */
1510      val.value = wide_int_to_tree (type, value);
1511    }
1512  else
1513    {
1514      val.lattice_val = VARYING;
1515      val.value = NULL_TREE;
1516      val.mask = -1;
1517    }
1518  return val;
1519}
1520
1521/* Return the propagation value when applying the operation CODE to
1522   the values RHS1 and RHS2 yielding type TYPE.  */
1523
1524static ccp_prop_value_t
1525bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1526{
1527  ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
1528  ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
1529  widest_int value, mask;
1530  ccp_prop_value_t val;
1531
1532  if (r1val.lattice_val == UNDEFINED
1533      || r2val.lattice_val == UNDEFINED)
1534    {
1535      val.lattice_val = VARYING;
1536      val.value = NULL_TREE;
1537      val.mask = -1;
1538      return val;
1539    }
1540
1541  gcc_assert ((r1val.lattice_val == CONSTANT
1542	       && TREE_CODE (r1val.value) == INTEGER_CST)
1543	      || r1val.mask == -1);
1544  gcc_assert ((r2val.lattice_val == CONSTANT
1545	       && TREE_CODE (r2val.value) == INTEGER_CST)
1546	      || r2val.mask == -1);
1547  bit_value_binop_1 (code, type, &value, &mask,
1548		     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
1549		     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
1550  if (mask != -1)
1551    {
1552      val.lattice_val = CONSTANT;
1553      val.mask = mask;
1554      /* ???  Delay building trees here.  */
1555      val.value = wide_int_to_tree (type, value);
1556    }
1557  else
1558    {
1559      val.lattice_val = VARYING;
1560      val.value = NULL_TREE;
1561      val.mask = -1;
1562    }
1563  return val;
1564}
1565
/* Return the propagation value for __builtin_assume_aligned
   and functions with assume_aligned or alloc_align attribute.
   For __builtin_assume_aligned, ATTR is NULL_TREE,
   for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
   is false, for alloc_align attribute ATTR is non-NULL and
   ALLOC_ALIGNED is true.  */
1572
1573static ccp_prop_value_t
1574bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
1575			  bool alloc_aligned)
1576{
1577  tree align, misalign = NULL_TREE, type;
1578  unsigned HOST_WIDE_INT aligni, misaligni = 0;
1579  ccp_prop_value_t alignval;
1580  widest_int value, mask;
1581  ccp_prop_value_t val;
1582
1583  if (attr == NULL_TREE)
1584    {
1585      tree ptr = gimple_call_arg (stmt, 0);
1586      type = TREE_TYPE (ptr);
1587      ptrval = get_value_for_expr (ptr, true);
1588    }
1589  else
1590    {
1591      tree lhs = gimple_call_lhs (stmt);
1592      type = TREE_TYPE (lhs);
1593    }
1594
1595  if (ptrval.lattice_val == UNDEFINED)
1596    return ptrval;
1597  gcc_assert ((ptrval.lattice_val == CONSTANT
1598	       && TREE_CODE (ptrval.value) == INTEGER_CST)
1599	      || ptrval.mask == -1);
1600  if (attr == NULL_TREE)
1601    {
1602      /* Get aligni and misaligni from __builtin_assume_aligned.  */
1603      align = gimple_call_arg (stmt, 1);
1604      if (!tree_fits_uhwi_p (align))
1605	return ptrval;
1606      aligni = tree_to_uhwi (align);
1607      if (gimple_call_num_args (stmt) > 2)
1608	{
1609	  misalign = gimple_call_arg (stmt, 2);
1610	  if (!tree_fits_uhwi_p (misalign))
1611	    return ptrval;
1612	  misaligni = tree_to_uhwi (misalign);
1613	}
1614    }
1615  else
1616    {
1617      /* Get aligni and misaligni from assume_aligned or
1618	 alloc_align attributes.  */
1619      if (TREE_VALUE (attr) == NULL_TREE)
1620	return ptrval;
1621      attr = TREE_VALUE (attr);
1622      align = TREE_VALUE (attr);
1623      if (!tree_fits_uhwi_p (align))
1624	return ptrval;
1625      aligni = tree_to_uhwi (align);
1626      if (alloc_aligned)
1627	{
1628	  if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1629	    return ptrval;
1630	  align = gimple_call_arg (stmt, aligni - 1);
1631	  if (!tree_fits_uhwi_p (align))
1632	    return ptrval;
1633	  aligni = tree_to_uhwi (align);
1634	}
1635      else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1636	{
1637	  misalign = TREE_VALUE (TREE_CHAIN (attr));
1638	  if (!tree_fits_uhwi_p (misalign))
1639	    return ptrval;
1640	  misaligni = tree_to_uhwi (misalign);
1641	}
1642    }
1643  if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1644    return ptrval;
1645
1646  align = build_int_cst_type (type, -aligni);
1647  alignval = get_value_for_expr (align, true);
1648  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
1649		     type, value_to_wide_int (ptrval), ptrval.mask,
1650		     type, value_to_wide_int (alignval), alignval.mask);
1651  if (mask != -1)
1652    {
1653      val.lattice_val = CONSTANT;
1654      val.mask = mask;
1655      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1656      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1657      value |= misaligni;
1658      /* ???  Delay building trees here.  */
1659      val.value = wide_int_to_tree (type, value);
1660    }
1661  else
1662    {
1663      val.lattice_val = VARYING;
1664      val.value = NULL_TREE;
1665      val.mask = -1;
1666    }
1667  return val;
1668}
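
/* For example (a sketch, not covering all paths): for
   __builtin_assume_aligned (p, 16, 4) with nothing else known about
   P, the BIT_AND above with -16 leaves the low four bits known and
   OR-ing in the misalignment produces CONSTANT 4 with mask ~15,
   i.e. P is known to be congruent to 4 modulo 16.  */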
1669
1670/* Evaluate statement STMT.
1671   Valid only for assignments, calls, conditionals, and switches. */
1672
1673static ccp_prop_value_t
1674evaluate_stmt (gimple stmt)
1675{
1676  ccp_prop_value_t val;
1677  tree simplified = NULL_TREE;
1678  ccp_lattice_t likelyvalue = likely_value (stmt);
1679  bool is_constant = false;
1680  unsigned int align;
1681
1682  if (dump_file && (dump_flags & TDF_DETAILS))
1683    {
1684      fprintf (dump_file, "which is likely ");
1685      switch (likelyvalue)
1686	{
1687	case CONSTANT:
1688	  fprintf (dump_file, "CONSTANT");
1689	  break;
1690	case UNDEFINED:
1691	  fprintf (dump_file, "UNDEFINED");
1692	  break;
1693	case VARYING:
1694	  fprintf (dump_file, "VARYING");
1695	  break;
1696	default:;
1697	}
1698      fprintf (dump_file, "\n");
1699    }
1700
1701  /* If the statement is likely to have a CONSTANT result, then try
1702     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that might
     profit from folding.  */
1706  if (likelyvalue == CONSTANT)
1707    {
1708      fold_defer_overflow_warnings ();
1709      simplified = ccp_fold (stmt);
1710      is_constant = simplified && is_gimple_min_invariant (simplified);
1711      fold_undefer_overflow_warnings (is_constant, stmt, 0);
1712      if (is_constant)
1713	{
1714	  /* The statement produced a constant value.  */
1715	  val.lattice_val = CONSTANT;
1716	  val.value = simplified;
1717	  val.mask = 0;
1718	}
1719    }
1720  /* If the statement is likely to have a VARYING result, then do not
1721     bother folding the statement.  */
1722  else if (likelyvalue == VARYING)
1723    {
1724      enum gimple_code code = gimple_code (stmt);
1725      if (code == GIMPLE_ASSIGN)
1726        {
1727          enum tree_code subcode = gimple_assign_rhs_code (stmt);
1728
1729          /* Other cases cannot satisfy is_gimple_min_invariant
1730             without folding.  */
1731          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1732            simplified = gimple_assign_rhs1 (stmt);
1733        }
1734      else if (code == GIMPLE_SWITCH)
1735        simplified = gimple_switch_index (as_a <gswitch *> (stmt));
1736      else
1737	/* These cannot satisfy is_gimple_min_invariant without folding.  */
1738	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1739      is_constant = simplified && is_gimple_min_invariant (simplified);
1740      if (is_constant)
1741	{
1742	  /* The statement produced a constant value.  */
1743	  val.lattice_val = CONSTANT;
1744	  val.value = simplified;
1745	  val.mask = 0;
1746	}
1747    }
1748
1749  /* Resort to simplification for bitwise tracking.  */
1750  if (flag_tree_bit_ccp
1751      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
1752      && !is_constant)
1753    {
1754      enum gimple_code code = gimple_code (stmt);
1755      val.lattice_val = VARYING;
1756      val.value = NULL_TREE;
1757      val.mask = -1;
1758      if (code == GIMPLE_ASSIGN)
1759	{
1760	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
1761	  tree rhs1 = gimple_assign_rhs1 (stmt);
1762	  switch (get_gimple_rhs_class (subcode))
1763	    {
1764	    case GIMPLE_SINGLE_RHS:
1765	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1766		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1767		val = get_value_for_expr (rhs1, true);
1768	      break;
1769
1770	    case GIMPLE_UNARY_RHS:
1771	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1772		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1773		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
1774		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
1775		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
1776	      break;
1777
1778	    case GIMPLE_BINARY_RHS:
1779	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1780		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1781		{
1782		  tree lhs = gimple_assign_lhs (stmt);
1783		  tree rhs2 = gimple_assign_rhs2 (stmt);
1784		  val = bit_value_binop (subcode,
1785					 TREE_TYPE (lhs), rhs1, rhs2);
1786		}
1787	      break;
1788
1789	    default:;
1790	    }
1791	}
1792      else if (code == GIMPLE_COND)
1793	{
1794	  enum tree_code code = gimple_cond_code (stmt);
1795	  tree rhs1 = gimple_cond_lhs (stmt);
1796	  tree rhs2 = gimple_cond_rhs (stmt);
1797	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1798	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1799	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1800	}
1801      else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1802	{
1803	  tree fndecl = gimple_call_fndecl (stmt);
1804	  switch (DECL_FUNCTION_CODE (fndecl))
1805	    {
1806	    case BUILT_IN_MALLOC:
1807	    case BUILT_IN_REALLOC:
1808	    case BUILT_IN_CALLOC:
1809	    case BUILT_IN_STRDUP:
1810	    case BUILT_IN_STRNDUP:
1811	      val.lattice_val = CONSTANT;
1812	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1813	      val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1814			   / BITS_PER_UNIT - 1);
1815	      break;
1816
1817	    case BUILT_IN_ALLOCA:
1818	    case BUILT_IN_ALLOCA_WITH_ALIGN:
1819	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
1820		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
1821		       : BIGGEST_ALIGNMENT);
1822	      val.lattice_val = CONSTANT;
1823	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1824	      val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
1825	      break;
1826
1827	    /* These builtins return their first argument, unmodified.  */
1828	    case BUILT_IN_MEMCPY:
1829	    case BUILT_IN_MEMMOVE:
1830	    case BUILT_IN_MEMSET:
1831	    case BUILT_IN_STRCPY:
1832	    case BUILT_IN_STRNCPY:
1833	    case BUILT_IN_MEMCPY_CHK:
1834	    case BUILT_IN_MEMMOVE_CHK:
1835	    case BUILT_IN_MEMSET_CHK:
1836	    case BUILT_IN_STRCPY_CHK:
1837	    case BUILT_IN_STRNCPY_CHK:
1838	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1839	      break;
1840
1841	    case BUILT_IN_ASSUME_ALIGNED:
1842	      val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
1843	      break;
1844
1845	    case BUILT_IN_ALIGNED_ALLOC:
1846	      {
1847		tree align = get_constant_value (gimple_call_arg (stmt, 0));
1848		if (align
1849		    && tree_fits_uhwi_p (align))
1850		  {
1851		    unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1852		    if (aligni > 1
1853			/* align must be a power of two.  */
1854			&& (aligni & (aligni - 1)) == 0)
1855		      {
1856			val.lattice_val = CONSTANT;
1857			val.value = build_int_cst (ptr_type_node, 0);
1858			val.mask = -aligni;
1859		      }
1860		  }
1861		break;
1862	      }
1863
1864	    default:;
1865	    }
1866	}
1867      if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
1868	{
1869	  tree fntype = gimple_call_fntype (stmt);
1870	  if (fntype)
1871	    {
1872	      tree attrs = lookup_attribute ("assume_aligned",
1873					     TYPE_ATTRIBUTES (fntype));
1874	      if (attrs)
1875		val = bit_value_assume_aligned (stmt, attrs, val, false);
1876	      attrs = lookup_attribute ("alloc_align",
1877					TYPE_ATTRIBUTES (fntype));
1878	      if (attrs)
1879		val = bit_value_assume_aligned (stmt, attrs, val, true);
1880	    }
1881	}
1882      is_constant = (val.lattice_val == CONSTANT);
1883    }
1884
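  /* For example (a sketch), if the recorded nonzero bits of the LHS are
     0xff, a result that would otherwise be VARYING is refined below to
     value 0 with mask 0xff: the upper bits are known to be zero and only
     the low byte remains unknown.  */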
1885  if (flag_tree_bit_ccp
1886      && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
1887	  || (!is_constant && likelyvalue != UNDEFINED))
1888      && gimple_get_lhs (stmt)
1889      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1890    {
1891      tree lhs = gimple_get_lhs (stmt);
1892      wide_int nonzero_bits = get_nonzero_bits (lhs);
1893      if (nonzero_bits != -1)
1894	{
1895	  if (!is_constant)
1896	    {
1897	      val.lattice_val = CONSTANT;
1898	      val.value = build_zero_cst (TREE_TYPE (lhs));
1899	      val.mask = extend_mask (nonzero_bits);
1900	      is_constant = true;
1901	    }
1902	  else
1903	    {
1904	      if (wi::bit_and_not (val.value, nonzero_bits) != 0)
1905		val.value = wide_int_to_tree (TREE_TYPE (lhs),
1906					      nonzero_bits & val.value);
1907	      if (nonzero_bits == 0)
1908		val.mask = 0;
1909	      else
1910		val.mask = val.mask & extend_mask (nonzero_bits);
1911	    }
1912	}
1913    }
1914
1915  if (!is_constant)
1916    {
1917      /* The statement produced a nonconstant value.  If the statement
1918	 had UNDEFINED operands, then the result of the statement
1919	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
1920      if (likelyvalue == UNDEFINED)
1921	{
1922	  val.lattice_val = likelyvalue;
1923	  val.mask = 0;
1924	}
1925      else
1926	{
1927	  val.lattice_val = VARYING;
1928	  val.mask = -1;
1929	}
1930
1931      val.value = NULL_TREE;
1932    }
1933
1934  return val;
1935}
1936
1937typedef hash_table<pointer_hash<gimple_statement_base> > gimple_htab;
1938
1939/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1940   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */
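
/* For illustration (SSA and variable names here are made up): given

	saved_1 = __builtin_stack_save ();
	...
	__builtin_stack_restore (saved_1);

   a clobber

	var ={v} {CLOBBER};

   is placed immediately before the restore; copies and PHIs of
   SAVED_VAL are followed so that every matching restore is reached.  */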
1941
1942static void
1943insert_clobber_before_stack_restore (tree saved_val, tree var,
1944				     gimple_htab **visited)
1945{
1946  gimple stmt;
1947  gassign *clobber_stmt;
1948  tree clobber;
1949  imm_use_iterator iter;
1950  gimple_stmt_iterator i;
1951  gimple *slot;
1952
1953  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
1954    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1955      {
1956	clobber = build_constructor (TREE_TYPE (var),
1957				     NULL);
1958	TREE_THIS_VOLATILE (clobber) = 1;
1959	clobber_stmt = gimple_build_assign (var, clobber);
1960
1961	i = gsi_for_stmt (stmt);
1962	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
1963      }
1964    else if (gimple_code (stmt) == GIMPLE_PHI)
1965      {
1966	if (!*visited)
1967	  *visited = new gimple_htab (10);
1968
1969	slot = (*visited)->find_slot (stmt, INSERT);
1970	if (*slot != NULL)
1971	  continue;
1972
1973	*slot = stmt;
1974	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
1975					     visited);
1976      }
1977    else if (gimple_assign_ssa_name_copy_p (stmt))
1978      insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
1979					   visited);
1980    else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
1981      continue;
1982    else
1983      gcc_assert (is_gimple_debug (stmt));
1984}
1985
1986/* Advance the iterator to the previous non-debug gimple statement in the same
1987   or a dominating basic block.  */
1988
1989static inline void
1990gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
1991{
1992  basic_block dom;
1993
1994  gsi_prev_nondebug (i);
1995  while (gsi_end_p (*i))
1996    {
1997      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
1998      if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1999	return;
2000
2001      *i = gsi_last_bb (dom);
2002    }
2003}
2004
2005/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
2006   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2007
2008   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
2009   previous pass (such as DOM) duplicated it along multiple paths to a BB.  In
2010   that case the function gives up without inserting the clobbers.  */
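
/* For illustration (a sketch): when fold_builtin_alloca_with_align has
   replaced an alloca with the address of a declared array VAR, the
   dominating

	saved_1 = __builtin_stack_save ();

   is located by scanning statements backwards through dominating blocks,
   and a clobber of VAR is then inserted before every matching
   __builtin_stack_restore, marking VAR as dead at that point.  */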
2011
2012static void
2013insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2014{
2015  gimple stmt;
2016  tree saved_val;
2017  gimple_htab *visited = NULL;
2018
2019  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
2020    {
2021      stmt = gsi_stmt (i);
2022
2023      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2024	continue;
2025
2026      saved_val = gimple_call_lhs (stmt);
2027      if (saved_val == NULL_TREE)
2028	continue;
2029
2030      insert_clobber_before_stack_restore (saved_val, var, &visited);
2031      break;
2032    }
2033
2034  delete visited;
2035}
2036
2037/* Detects a __builtin_alloca_with_align with a constant size argument.  If one
2038   is found, declares a fixed-size array and returns its address; otherwise
2039   returns NULL_TREE.  */
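
/* For example (a sketch, with made-up SSA and DECL names):

	p_1 = __builtin_alloca_with_align (16, 32);

   may be folded into

	p_1 = &D.1234;

   where D.1234 is a local 16-byte array whose DECL_ALIGN is the 32 bits
   requested by the second argument.  */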
2040
2041static tree
2042fold_builtin_alloca_with_align (gimple stmt)
2043{
2044  unsigned HOST_WIDE_INT size, threshold, n_elem;
2045  tree lhs, arg, block, var, elem_type, array_type;
2046
2047  /* Get lhs.  */
2048  lhs = gimple_call_lhs (stmt);
2049  if (lhs == NULL_TREE)
2050    return NULL_TREE;
2051
2052  /* Detect constant argument.  */
2053  arg = get_constant_value (gimple_call_arg (stmt, 0));
2054  if (arg == NULL_TREE
2055      || TREE_CODE (arg) != INTEGER_CST
2056      || !tree_fits_uhwi_p (arg))
2057    return NULL_TREE;
2058
2059  size = tree_to_uhwi (arg);
2060
2061  /* Heuristic: don't fold large allocas.  */
2062  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
2063  /* In case the alloca is located at function entry, it has the same lifetime
2064     as a declared array, so we allow a larger size.  */
2065  block = gimple_block (stmt);
2066  if (!(cfun->after_inlining
2067	&& block
2068        && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2069    threshold /= 10;
2070  if (size > threshold)
2071    return NULL_TREE;
2072
2073  /* Declare array.  */
2074  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2075  n_elem = size * 8 / BITS_PER_UNIT;
2076  array_type = build_array_type_nelts (elem_type, n_elem);
2077  var = create_tmp_var (array_type);
2078  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
2079  {
2080    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2081    if (pi != NULL && !pi->pt.anything)
2082      {
2083	bool singleton_p;
2084	unsigned uid;
2085	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
2086	gcc_assert (singleton_p);
2087	SET_DECL_PT_UID (var, uid);
2088      }
2089  }
2090
2091  /* Fold alloca to the address of the array.  */
2092  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2093}
2094
2095/* Fold the stmt at *GSI with CCP-specific information that propagation
2096   and regular folding do not catch.  */
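
/* For instance (a sketch; x_2 is a made-up name), when the lattice says
   the controlling value of

	if (x_2 != 0)

   is the constant 0 with no unknown bits, the GIMPLE_COND case below
   rewrites the statement into a trivially false condition via
   gimple_cond_make_false.  */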
2097
2098static bool
2099ccp_fold_stmt (gimple_stmt_iterator *gsi)
2100{
2101  gimple stmt = gsi_stmt (*gsi);
2102
2103  switch (gimple_code (stmt))
2104    {
2105    case GIMPLE_COND:
2106      {
2107	gcond *cond_stmt = as_a <gcond *> (stmt);
2108	ccp_prop_value_t val;
2109	/* Statement evaluation will handle type mismatches in constants
2110	   more gracefully than the final propagation.  This allows us to
2111	   fold more conditionals here.  */
2112	val = evaluate_stmt (stmt);
2113	if (val.lattice_val != CONSTANT
2114	    || val.mask != 0)
2115	  return false;
2116
2117	if (dump_file)
2118	  {
2119	    fprintf (dump_file, "Folding predicate ");
2120	    print_gimple_expr (dump_file, stmt, 0, 0);
2121	    fprintf (dump_file, " to ");
2122	    print_generic_expr (dump_file, val.value, 0);
2123	    fprintf (dump_file, "\n");
2124	  }
2125
2126	if (integer_zerop (val.value))
2127	  gimple_cond_make_false (cond_stmt);
2128	else
2129	  gimple_cond_make_true (cond_stmt);
2130
2131	return true;
2132      }
2133
2134    case GIMPLE_CALL:
2135      {
2136	tree lhs = gimple_call_lhs (stmt);
2137	int flags = gimple_call_flags (stmt);
2138	tree val;
2139	tree argt;
2140	bool changed = false;
2141	unsigned i;
2142
2143	/* If the call was folded into a constant make sure it goes
2144	   away even if we cannot propagate into all uses because of
2145	   type issues.  */
2146	if (lhs
2147	    && TREE_CODE (lhs) == SSA_NAME
2148	    && (val = get_constant_value (lhs))
2149	    /* Don't optimize away calls that have side-effects.  */
2150	    && (flags & (ECF_CONST|ECF_PURE)) != 0
2151	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
2152	  {
2153	    tree new_rhs = unshare_expr (val);
2154	    bool res;
2155	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
2156					    TREE_TYPE (new_rhs)))
2157	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2158	    res = update_call_from_tree (gsi, new_rhs);
2159	    gcc_assert (res);
2160	    return true;
2161	  }
2162
2163	/* Internal calls provide no argument types, so the extra laxity
2164	   for normal calls does not apply.  */
2165	if (gimple_call_internal_p (stmt))
2166	  return false;
2167
2168        /* The heuristic of fold_builtin_alloca_with_align differs before and
2169	   after inlining, so we don't require the arg to have been replaced by
2170	   a constant; it just has to be known to be constant.  */
2171        if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
2172          {
2173            tree new_rhs = fold_builtin_alloca_with_align (stmt);
2174            if (new_rhs)
2175	      {
2176		bool res = update_call_from_tree (gsi, new_rhs);
2177		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
2178		gcc_assert (res);
2179		insert_clobbers_for_var (*gsi, var);
2180		return true;
2181	      }
2182          }
2183
2184	/* Propagate into the call arguments.  Compared to replace_uses_in
2185	   this can use the argument slot types for type verification
2186	   instead of the current argument type.  We also can safely
2187	   drop qualifiers here as we are dealing with constants anyway.  */
2188	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
2189	for (i = 0; i < gimple_call_num_args (stmt) && argt;
2190	     ++i, argt = TREE_CHAIN (argt))
2191	  {
2192	    tree arg = gimple_call_arg (stmt, i);
2193	    if (TREE_CODE (arg) == SSA_NAME
2194		&& (val = get_constant_value (arg))
2195		&& useless_type_conversion_p
2196		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
2197		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
2198	      {
2199		gimple_call_set_arg (stmt, i, unshare_expr (val));
2200		changed = true;
2201	      }
2202	  }
2203
2204	return changed;
2205      }
2206
2207    case GIMPLE_ASSIGN:
2208      {
2209	tree lhs = gimple_assign_lhs (stmt);
2210	tree val;
2211
2212	/* If we have a load that turned out to be constant replace it
2213	   as we cannot propagate into all uses in all cases.  */
2214	if (gimple_assign_single_p (stmt)
2215	    && TREE_CODE (lhs) == SSA_NAME
2216	    && (val = get_constant_value (lhs)))
2217	  {
2218	    tree rhs = unshare_expr (val);
2219	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2220	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2221	    gimple_assign_set_rhs_from_tree (gsi, rhs);
2222	    return true;
2223	  }
2224
2225	return false;
2226      }
2227
2228    default:
2229      return false;
2230    }
2231}
2232
2233/* Visit the assignment statement STMT.  Set the value of its LHS to the
2234   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
2235   creates virtual definitions, set the value of each new name to that
2236   of the RHS (if we can derive a constant out of the RHS).
2237   Value-returning call statements also perform an assignment, and
2238   are handled here.  */
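
/* For example (a sketch), for the copy

	y_2 = x_1;

   where x_1 already has the lattice value CONSTANT 4, y_2's lattice
   value becomes CONSTANT 4 directly; any other assignment or call goes
   through evaluate_stmt.  */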
2239
2240static enum ssa_prop_result
2241visit_assignment (gimple stmt, tree *output_p)
2242{
2243  ccp_prop_value_t val;
2244  enum ssa_prop_result retval;
2245
2246  tree lhs = gimple_get_lhs (stmt);
2247
2248  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
2249              || gimple_call_lhs (stmt) != NULL_TREE);
2250
2251  if (gimple_assign_single_p (stmt)
2252      && gimple_assign_rhs_code (stmt) == SSA_NAME)
2253    /* For a simple copy operation, we copy the lattice values.  */
2254    val = *get_value (gimple_assign_rhs1 (stmt));
2255  else
2256    /* Evaluate the statement, which could be
2257       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
2258    val = evaluate_stmt (stmt);
2259
2260  retval = SSA_PROP_NOT_INTERESTING;
2261
2262  /* Set the lattice value of the statement's output.  */
2263  if (TREE_CODE (lhs) == SSA_NAME)
2264    {
2265      /* If STMT is an assignment to an SSA_NAME, we only have one
2266	 value to set.  */
2267      if (set_lattice_value (lhs, val))
2268	{
2269	  *output_p = lhs;
2270	  if (val.lattice_val == VARYING)
2271	    retval = SSA_PROP_VARYING;
2272	  else
2273	    retval = SSA_PROP_INTERESTING;
2274	}
2275    }
2276
2277  return retval;
2278}
2279
2280
2281/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
2282   if it can determine which edge will be taken.  Otherwise, return
2283   SSA_PROP_VARYING.  */
2284
2285static enum ssa_prop_result
2286visit_cond_stmt (gimple stmt, edge *taken_edge_p)
2287{
2288  ccp_prop_value_t val;
2289  basic_block block;
2290
2291  block = gimple_bb (stmt);
2292  val = evaluate_stmt (stmt);
2293  if (val.lattice_val != CONSTANT
2294      || val.mask != 0)
2295    return SSA_PROP_VARYING;
2296
2297  /* Find which edge out of the conditional block will be taken and add it
2298     to the worklist.  If no single edge can be determined statically,
2299     return SSA_PROP_VARYING to feed all the outgoing edges to the
2300     propagation engine.  */
2301  *taken_edge_p = find_taken_edge (block, val.value);
2302  if (*taken_edge_p)
2303    return SSA_PROP_INTERESTING;
2304  else
2305    return SSA_PROP_VARYING;
2306}
2307
2308
2309/* Evaluate statement STMT.  If the statement produces an output value and
2310   its evaluation changes the lattice value of its output, return
2311   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2312   output value.
2313
2314   If STMT is a conditional branch and we can determine its truth
2315   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
2316   value, return SSA_PROP_VARYING.  */
2317
2318static enum ssa_prop_result
2319ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
2320{
2321  tree def;
2322  ssa_op_iter iter;
2323
2324  if (dump_file && (dump_flags & TDF_DETAILS))
2325    {
2326      fprintf (dump_file, "\nVisiting statement:\n");
2327      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2328    }
2329
2330  switch (gimple_code (stmt))
2331    {
2332      case GIMPLE_ASSIGN:
2333        /* If the statement is an assignment that produces a single
2334           output value, evaluate its RHS to see if the lattice value of
2335           its output has changed.  */
2336        return visit_assignment (stmt, output_p);
2337
2338      case GIMPLE_CALL:
2339        /* A value-returning call also performs an assignment.  */
2340        if (gimple_call_lhs (stmt) != NULL_TREE)
2341          return visit_assignment (stmt, output_p);
2342        break;
2343
2344      case GIMPLE_COND:
2345      case GIMPLE_SWITCH:
2346        /* If STMT is a conditional branch, see if we can determine
2347           which branch will be taken.   */
2348        /* FIXME.  It appears that we should be able to optimize
2349           computed GOTOs here as well.  */
2350        return visit_cond_stmt (stmt, taken_edge_p);
2351
2352      default:
2353        break;
2354    }
2355
2356  /* Any other kind of statement is not interesting for constant
2357     propagation and, therefore, not worth simulating.  */
2358  if (dump_file && (dump_flags & TDF_DETAILS))
2359    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");
2360
2361  /* Definitions made by statements other than assignments to
2362     SSA_NAMEs represent unknown modifications to their outputs.
2363     Mark them VARYING.  */
2364  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2365    {
2366      ccp_prop_value_t v = { VARYING, NULL_TREE, -1 };
2367      set_lattice_value (def, v);
2368    }
2369
2370  return SSA_PROP_VARYING;
2371}
2372
2373
2374/* Main entry point for SSA Conditional Constant Propagation.  */
2375
2376static unsigned int
2377do_ssa_ccp (void)
2378{
2379  unsigned int todo = 0;
2380  calculate_dominance_info (CDI_DOMINATORS);
2381  ccp_initialize ();
2382  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
2383  if (ccp_finalize ())
2384    todo = (TODO_cleanup_cfg | TODO_update_ssa);
2385  free_dominance_info (CDI_DOMINATORS);
2386  return todo;
2387}
2388
2389
2390namespace {
2391
2392const pass_data pass_data_ccp =
2393{
2394  GIMPLE_PASS, /* type */
2395  "ccp", /* name */
2396  OPTGROUP_NONE, /* optinfo_flags */
2397  TV_TREE_CCP, /* tv_id */
2398  ( PROP_cfg | PROP_ssa ), /* properties_required */
2399  0, /* properties_provided */
2400  0, /* properties_destroyed */
2401  0, /* todo_flags_start */
2402  TODO_update_address_taken, /* todo_flags_finish */
2403};
2404
2405class pass_ccp : public gimple_opt_pass
2406{
2407public:
2408  pass_ccp (gcc::context *ctxt)
2409    : gimple_opt_pass (pass_data_ccp, ctxt)
2410  {}
2411
2412  /* opt_pass methods: */
2413  opt_pass * clone () { return new pass_ccp (m_ctxt); }
2414  virtual bool gate (function *) { return flag_tree_ccp != 0; }
2415  virtual unsigned int execute (function *) { return do_ssa_ccp (); }
2416
2417}; // class pass_ccp
2418
2419} // anon namespace
2420
2421gimple_opt_pass *
2422make_pass_ccp (gcc::context *ctxt)
2423{
2424  return new pass_ccp (ctxt);
2425}
2426
2427
2428
2429/* Try to optimize out __builtin_stack_restore.  Optimize it out
2430   if there is another __builtin_stack_restore in the same basic
2431   block and no calls or ASM_EXPRs are in between, or if this block's
2432   only outgoing edge is to EXIT_BLOCK and there are no calls or
2433   ASM_EXPRs after this __builtin_stack_restore.  */
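
/* For instance (a sketch with made-up SSA names), in

	t_1 = __builtin_stack_save ();
	...
	__builtin_stack_restore (t_1);	<-- redundant
	...				<-- no calls or ASM_EXPRs here
	__builtin_stack_restore (t_1);

   the first restore has no observable effect; it is replaced by
   integer_zero_node so that the caller deletes it.  */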
2434
2435static tree
2436optimize_stack_restore (gimple_stmt_iterator i)
2437{
2438  tree callee;
2439  gimple stmt;
2440
2441  basic_block bb = gsi_bb (i);
2442  gimple call = gsi_stmt (i);
2443
2444  if (gimple_code (call) != GIMPLE_CALL
2445      || gimple_call_num_args (call) != 1
2446      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2447      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2448    return NULL_TREE;
2449
2450  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2451    {
2452      stmt = gsi_stmt (i);
2453      if (gimple_code (stmt) == GIMPLE_ASM)
2454	return NULL_TREE;
2455      if (gimple_code (stmt) != GIMPLE_CALL)
2456	continue;
2457
2458      callee = gimple_call_fndecl (stmt);
2459      if (!callee
2460	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2461	  /* All regular builtins are ok, just obviously not alloca.  */
2462	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2463	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
2464	return NULL_TREE;
2465
2466      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2467	goto second_stack_restore;
2468    }
2469
2470  if (!gsi_end_p (i))
2471    return NULL_TREE;
2472
2473  /* Allow zero successors, or a single successor that is the exit block.  */
2474  switch (EDGE_COUNT (bb->succs))
2475    {
2476    case 0:
2477      break;
2478    case 1:
2479      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2480	return NULL_TREE;
2481      break;
2482    default:
2483      return NULL_TREE;
2484    }
2485 second_stack_restore:
2486
2487  /* If there's exactly one use, then zap the call to __builtin_stack_save.
2488     If there are multiple uses, then the last one should remove the call.
2489     In any case, whether the call to __builtin_stack_save can be removed
2490     or not is irrelevant to removing the call to __builtin_stack_restore.  */
2491  if (has_single_use (gimple_call_arg (call, 0)))
2492    {
2493      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2494      if (is_gimple_call (stack_save))
2495	{
2496	  callee = gimple_call_fndecl (stack_save);
2497	  if (callee
2498	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2499	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2500	    {
2501	      gimple_stmt_iterator stack_save_gsi;
2502	      tree rhs;
2503
2504	      stack_save_gsi = gsi_for_stmt (stack_save);
2505	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2506	      update_call_from_tree (&stack_save_gsi, rhs);
2507	    }
2508	}
2509    }
2510
2511  /* No effect, so the statement will be deleted.  */
2512  return integer_zero_node;
2513}
2514
2515/* If the va_list type is a simple pointer and nothing special is needed,
2516   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2517   optimize __builtin_va_end (&ap) out as a no-op, and optimize
2518   __builtin_va_copy into a simple pointer assignment.  */
2519
2520static tree
2521optimize_stdarg_builtin (gimple call)
2522{
2523  tree callee, lhs, rhs, cfun_va_list;
2524  bool va_list_simple_ptr;
2525  location_t loc = gimple_location (call);
2526
2527  if (gimple_code (call) != GIMPLE_CALL)
2528    return NULL_TREE;
2529
2530  callee = gimple_call_fndecl (call);
2531
2532  cfun_va_list = targetm.fn_abi_va_list (callee);
2533  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2534		       && (TREE_TYPE (cfun_va_list) == void_type_node
2535			   || TREE_TYPE (cfun_va_list) == char_type_node);
2536
2537  switch (DECL_FUNCTION_CODE (callee))
2538    {
2539    case BUILT_IN_VA_START:
2540      if (!va_list_simple_ptr
2541	  || targetm.expand_builtin_va_start != NULL
2542	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
2543	return NULL_TREE;
2544
2545      if (gimple_call_num_args (call) != 2)
2546	return NULL_TREE;
2547
2548      lhs = gimple_call_arg (call, 0);
2549      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2550	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2551	     != TYPE_MAIN_VARIANT (cfun_va_list))
2552	return NULL_TREE;
2553
2554      lhs = build_fold_indirect_ref_loc (loc, lhs);
2555      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
2556                             1, integer_zero_node);
2557      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2558      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2559
2560    case BUILT_IN_VA_COPY:
2561      if (!va_list_simple_ptr)
2562	return NULL_TREE;
2563
2564      if (gimple_call_num_args (call) != 2)
2565	return NULL_TREE;
2566
2567      lhs = gimple_call_arg (call, 0);
2568      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2569	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2570	     != TYPE_MAIN_VARIANT (cfun_va_list))
2571	return NULL_TREE;
2572
2573      lhs = build_fold_indirect_ref_loc (loc, lhs);
2574      rhs = gimple_call_arg (call, 1);
2575      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2576	  != TYPE_MAIN_VARIANT (cfun_va_list))
2577	return NULL_TREE;
2578
2579      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2580      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2581
2582    case BUILT_IN_VA_END:
2583      /* No effect, so the statement will be deleted.  */
2584      return integer_zero_node;
2585
2586    default:
2587      gcc_unreachable ();
2588    }
2589}
2590
2591/* Attempt to make the block containing the __builtin_unreachable call at I
2592   unreachable by changing the incoming jumps.  Return true if any jump changed.  */
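
/* For instance (a sketch), if a basic block starting with a call to
   __builtin_unreachable is reached through the true edge of

	if (x_1 > 10)

   the condition is rewritten via gimple_cond_make_false so that the
   edge into the unreachable block is never taken.  */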
2593
2594static bool
2595optimize_unreachable (gimple_stmt_iterator i)
2596{
2597  basic_block bb = gsi_bb (i);
2598  gimple_stmt_iterator gsi;
2599  gimple stmt;
2600  edge_iterator ei;
2601  edge e;
2602  bool ret;
2603
2604  if (flag_sanitize & SANITIZE_UNREACHABLE)
2605    return false;
2606
2607  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2608    {
2609      stmt = gsi_stmt (gsi);
2610
2611      if (is_gimple_debug (stmt))
2612       continue;
2613
2614      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2615	{
2616	  /* Verify we do not need to preserve the label.  */
2617	  if (FORCED_LABEL (gimple_label_label (label_stmt)))
2618	    return false;
2619
2620	  continue;
2621	}
2622
2623      /* Only handle the case that __builtin_unreachable is the first statement
2624	 in the block.  We rely on DCE to remove stmts without side-effects
2625	 before __builtin_unreachable.  */
2626      if (gsi_stmt (gsi) != gsi_stmt (i))
2627        return false;
2628    }
2629
2630  ret = false;
2631  FOR_EACH_EDGE (e, ei, bb->preds)
2632    {
2633      gsi = gsi_last_bb (e->src);
2634      if (gsi_end_p (gsi))
2635	continue;
2636
2637      stmt = gsi_stmt (gsi);
2638      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
2639	{
2640	  if (e->flags & EDGE_TRUE_VALUE)
2641	    gimple_cond_make_false (cond_stmt);
2642	  else if (e->flags & EDGE_FALSE_VALUE)
2643	    gimple_cond_make_true (cond_stmt);
2644	  else
2645	    gcc_unreachable ();
2646	  update_stmt (cond_stmt);
2647	}
2648      else
2649	{
2650	  /* TODO: handle other cases, e.g. a switch statement.  */
2651	  continue;
2652	}
2653
2654      ret = true;
2655    }
2656
2657  return ret;
2658}
2659
2660/* A simple pass that attempts to fold all builtin functions.  This pass
2661   is run after we've propagated as many constants as we can.  */
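
/* For example (names are made up), a leftover

	t_3 = __builtin_constant_p (x_1);

   is resolved to 0 here, and a __builtin_stack_restore that
   optimize_stack_restore above proves redundant is removed.  */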
2662
2663namespace {
2664
2665const pass_data pass_data_fold_builtins =
2666{
2667  GIMPLE_PASS, /* type */
2668  "fab", /* name */
2669  OPTGROUP_NONE, /* optinfo_flags */
2670  TV_NONE, /* tv_id */
2671  ( PROP_cfg | PROP_ssa ), /* properties_required */
2672  0, /* properties_provided */
2673  0, /* properties_destroyed */
2674  0, /* todo_flags_start */
2675  TODO_update_ssa, /* todo_flags_finish */
2676};
2677
2678class pass_fold_builtins : public gimple_opt_pass
2679{
2680public:
2681  pass_fold_builtins (gcc::context *ctxt)
2682    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
2683  {}
2684
2685  /* opt_pass methods: */
2686  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
2687  virtual unsigned int execute (function *);
2688
2689}; // class pass_fold_builtins
2690
2691unsigned int
2692pass_fold_builtins::execute (function *fun)
2693{
2694  bool cfg_changed = false;
2695  basic_block bb;
2696  unsigned int todoflags = 0;
2697
2698  FOR_EACH_BB_FN (bb, fun)
2699    {
2700      gimple_stmt_iterator i;
2701      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
2702	{
2703          gimple stmt, old_stmt;
2704	  tree callee;
2705	  enum built_in_function fcode;
2706
2707	  stmt = gsi_stmt (i);
2708
2709          if (gimple_code (stmt) != GIMPLE_CALL)
2710	    {
2711	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
2712		 after the last GIMPLE DSE they aren't needed and might
2713		 unnecessarily keep the SSA_NAMEs live.  */
2714	      if (gimple_clobber_p (stmt))
2715		{
2716		  tree lhs = gimple_assign_lhs (stmt);
2717		  if (TREE_CODE (lhs) == MEM_REF
2718		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2719		    {
2720		      unlink_stmt_vdef (stmt);
2721		      gsi_remove (&i, true);
2722		      release_defs (stmt);
2723		      continue;
2724		    }
2725		}
2726	      gsi_next (&i);
2727	      continue;
2728	    }
2729
2730	  callee = gimple_call_fndecl (stmt);
2731	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2732	    {
2733	      gsi_next (&i);
2734	      continue;
2735	    }
2736
2737	  fcode = DECL_FUNCTION_CODE (callee);
2738	  if (fold_stmt (&i))
2739	    ;
2740	  else
2741	    {
2742	      tree result = NULL_TREE;
2743	      switch (DECL_FUNCTION_CODE (callee))
2744		{
2745		case BUILT_IN_CONSTANT_P:
2746		  /* Resolve __builtin_constant_p.  If it hasn't been
2747		     folded to integer_one_node by now, it's fairly
2748		     certain that the value simply isn't constant.  */
2749		  result = integer_zero_node;
2750		  break;
2751
2752		case BUILT_IN_ASSUME_ALIGNED:
2753		  /* Remove __builtin_assume_aligned.  */
2754		  result = gimple_call_arg (stmt, 0);
2755		  break;
2756
2757		case BUILT_IN_STACK_RESTORE:
2758		  result = optimize_stack_restore (i);
2759		  if (result)
2760		    break;
2761		  gsi_next (&i);
2762		  continue;
2763
2764		case BUILT_IN_UNREACHABLE:
2765		  if (optimize_unreachable (i))
2766		    cfg_changed = true;
2767		  break;
2768
2769		case BUILT_IN_VA_START:
2770		case BUILT_IN_VA_END:
2771		case BUILT_IN_VA_COPY:
2772		  /* These shouldn't be folded before pass_stdarg.  */
2773		  result = optimize_stdarg_builtin (stmt);
2774		  if (result)
2775		    break;
2776		  /* FALLTHRU */
2777
2778		default:;
2779		}
2780
2781	      if (!result)
2782		{
2783		  gsi_next (&i);
2784		  continue;
2785		}
2786
2787	      if (!update_call_from_tree (&i, result))
2788		gimplify_and_update_call_from_tree (&i, result);
2789	    }
2790
2791	  todoflags |= TODO_update_address_taken;
2792
2793	  if (dump_file && (dump_flags & TDF_DETAILS))
2794	    {
2795	      fprintf (dump_file, "Simplified\n  ");
2796	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2797	    }
2798
2799          old_stmt = stmt;
2800	  stmt = gsi_stmt (i);
2801	  update_stmt (stmt);
2802
2803	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2804	      && gimple_purge_dead_eh_edges (bb))
2805	    cfg_changed = true;
2806
2807	  if (dump_file && (dump_flags & TDF_DETAILS))
2808	    {
2809	      fprintf (dump_file, "to\n  ");
2810	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2811	      fprintf (dump_file, "\n");
2812	    }
2813
2814	  /* Retry the same statement if it changed into another
2815	     builtin; there might be new opportunities now.  */
2816          if (gimple_code (stmt) != GIMPLE_CALL)
2817	    {
2818	      gsi_next (&i);
2819	      continue;
2820	    }
2821	  callee = gimple_call_fndecl (stmt);
2822	  if (!callee
2823              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2824	      || DECL_FUNCTION_CODE (callee) == fcode)
2825	    gsi_next (&i);
2826	}
2827    }
2828
2829  /* Delete unreachable blocks.  */
2830  if (cfg_changed)
2831    todoflags |= TODO_cleanup_cfg;
2832
2833  return todoflags;
2834}
2835
2836} // anon namespace
2837
2838gimple_opt_pass *
2839make_pass_fold_builtins (gcc::context *ctxt)
2840{
2841  return new pass_fold_builtins (ctxt);
2842}
2843