1/* Statement simplification on GIMPLE.
2   Copyright (C) 2010-2015 Free Software Foundation, Inc.
3   Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "hash-set.h"
26#include "machmode.h"
27#include "vec.h"
28#include "double-int.h"
29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "wide-int.h"
33#include "inchash.h"
34#include "tree.h"
35#include "fold-const.h"
36#include "stringpool.h"
37#include "hashtab.h"
38#include "hard-reg-set.h"
39#include "function.h"
40#include "rtl.h"
41#include "flags.h"
42#include "statistics.h"
43#include "real.h"
44#include "fixed-value.h"
45#include "insn-config.h"
46#include "expmed.h"
47#include "dojump.h"
48#include "explow.h"
49#include "calls.h"
50#include "emit-rtl.h"
51#include "varasm.h"
52#include "stmt.h"
53#include "expr.h"
54#include "stor-layout.h"
55#include "dumpfile.h"
56#include "bitmap.h"
57#include "predict.h"
58#include "dominance.h"
59#include "basic-block.h"
60#include "tree-ssa-alias.h"
61#include "internal-fn.h"
62#include "gimple-fold.h"
63#include "gimple-expr.h"
64#include "is-a.h"
65#include "gimple.h"
66#include "gimplify.h"
67#include "gimple-iterator.h"
68#include "gimple-ssa.h"
69#include "tree-ssanames.h"
70#include "tree-into-ssa.h"
71#include "tree-dfa.h"
72#include "tree-ssa.h"
73#include "tree-ssa-propagate.h"
74#include "target.h"
75#include "hash-map.h"
76#include "plugin-api.h"
77#include "ipa-ref.h"
78#include "cgraph.h"
79#include "ipa-utils.h"
80#include "gimple-pretty-print.h"
81#include "tree-ssa-address.h"
82#include "langhooks.h"
83#include "gimplify-me.h"
84#include "dbgcnt.h"
85#include "builtins.h"
86#include "output.h"
87#include "tree-eh.h"
88#include "gimple-match.h"
89#include "tree-phinodes.h"
90#include "ssa-iterators.h"
91#include "ipa-chkp.h"
92
93/* Return true when DECL can be referenced from current unit.
94   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
95   We can get declarations that are not possible to reference for various
96   reasons:
97
98     1) When analyzing C++ virtual tables.
99	C++ virtual tables do have known constructors even
100	when they are keyed to other compilation unit.
101	Those tables can contain pointers to methods and vars
102	in other units.  Those methods have both STATIC and EXTERNAL
103	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
106	In this case we have static VAR_DECL or FUNCTION_DECL
107	that has no corresponding callgraph/varpool node
108	declaring the body.
109     3) COMDAT functions referred by external vtables that
110        we devirtualize only during final compilation stage.
111        At this time we already decided that we will not output
112        the function body and thus we can't reference the symbol
113        directly.  */
114
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) declarations never have an output copy.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function body that was inlined everywhere has no standalone
	 copy left to reference.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized. */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  /* Again, an everywhere-inlined function has no referable copy.  */
  return !node || !node->global.inlined_to;
}
192
193/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
194   acceptable form for is_gimple_min_invariant.
195   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */
196
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM_REF[ptr, cst] so the whole value is a
     single invariant address rather than a pointer-plus expression.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal expression with the decl that
	     backs it, so later passes see an ordinary variable address.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Addresses of symbols we cannot legitimately reference from this
	 unit (see can_refer_decl_in_current_unit_p) must not be folded in.  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address makes the variable addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
253
254/* If SYM is a constant variable with known value, return the value.
255   NULL_TREE is returned otherwise.  */
256
257tree
258get_symbol_constant_value (tree sym)
259{
260  tree val = ctor_for_folding (sym);
261  if (val != error_mark_node)
262    {
263      if (val)
264	{
265	  val = canonicalize_constructor_val (unshare_expr (val), sym);
266	  if (val && is_gimple_min_invariant (val))
267	    return val;
268	  else
269	    return NULL_TREE;
270	}
271      /* Variables declared 'const' without an initializer
272	 have zero as the initializer if they may not be
273	 overridden at link or run time.  */
274      if (!val
275          && is_gimple_reg_type (TREE_TYPE (sym)))
276	return build_zero_cst (TREE_TYPE (sym));
277    }
278
279  return NULL_TREE;
280}
281
282
283
284/* Subroutine of fold_stmt.  We perform several simplifications of the
285   memory reference tree EXPR and make sure to re-gimplify them properly
286   after propagation of constant addresses.  IS_LHS is true if the
287   reference is supposed to be an lvalue.  */
288
289static tree
290maybe_fold_reference (tree expr, bool is_lhs)
291{
292  tree result;
293
294  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
295       || TREE_CODE (expr) == REALPART_EXPR
296       || TREE_CODE (expr) == IMAGPART_EXPR)
297      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
298    return fold_unary_loc (EXPR_LOCATION (expr),
299			   TREE_CODE (expr),
300			   TREE_TYPE (expr),
301			   TREE_OPERAND (expr, 0));
302  else if (TREE_CODE (expr) == BIT_FIELD_REF
303	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
304    return fold_ternary_loc (EXPR_LOCATION (expr),
305			     TREE_CODE (expr),
306			     TREE_TYPE (expr),
307			     TREE_OPERAND (expr, 0),
308			     TREE_OPERAND (expr, 1),
309			     TREE_OPERAND (expr, 2));
310
311  if (!is_lhs
312      && (result = fold_const_aggregate_ref (expr))
313      && is_gimple_min_invariant (result))
314    return result;
315
316  return NULL_TREE;
317}
318
319
320/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
321   replacement rhs for the statement or NULL_TREE if no simplification
322   could be made.  It is assumed that the operands have been previously
323   folded.  */
324
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers mark end-of-life of a variable; nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: if all possible targets of the
	       virtual call are known, replace the OBJ_TYPE_REF with a
	       direct function address.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM_REF[p, 0] simplifies to p converted to the RHS type.  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
	         X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
	          || (cmp_code == NE_EXPR && integer_onep (op2)))
	        is_logical_not = true;

	      if (is_logical_not == false)
	        result = op1;
	      /* Only for one-bit precision typed X the transformation
	         !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
	        result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
        result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings until we know the fold result
		 will actually be used.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
558
559/* Attempt to fold a conditional statement. Return true if any changes were
560   made. We only attempt to fold the condition expression, and do not perform
561   any transformation that would require alteration of the cfg.  It is
562   assumed that the operands have been previously folded.  */
563
564static bool
565fold_gimple_cond (gcond *stmt)
566{
567  tree result = fold_binary_loc (gimple_location (stmt),
568			     gimple_cond_code (stmt),
569                             boolean_type_node,
570                             gimple_cond_lhs (stmt),
571                             gimple_cond_rhs (stmt));
572
573  if (result)
574    {
575      STRIP_USELESS_TYPE_CONVERSION (result);
576      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
577        {
578          gimple_cond_set_condition_from_tree (stmt, result);
579          return true;
580        }
581    }
582
583  return false;
584}
585
586
587/* Replace a statement at *SI_P with a sequence of statements in STMTS,
588   adjusting the replacement stmts location and virtual operands.
589   If the statement has a lhs the last stmt in the sequence is expected
590   to assign to that lhs.  */
591
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  /* Propagate the replaced statement's location to all new statements.  */
  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* Statements that store to memory (non-register assignments, or
	 calls that can clobber memory) need a VDEF.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
658
659/* Convert EXPR into a GIMPLE value suitable for substitution on the
660   RHS of an assignment.  Insert the necessary statements before
661   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
663   is replaced by an assignment of the new RHS to the result variable.
664   If the result is to be ignored, then the call is replaced by a
665   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
666   VUSE and the last VDEF of the whole sequence be the same as the replaced
667   statement and using new SSA names for stores in between.  */
668
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is unused: gimplify EXPR only for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and append an assignment of the
	 temporary to the original call's LHS.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, preserving the virtual operand chain.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
714
715
716/* Replace the call at *GSI with the gimple value VAL.  */
717
718static void
719replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
720{
721  gimple stmt = gsi_stmt (*gsi);
722  tree lhs = gimple_call_lhs (stmt);
723  gimple repl;
724  if (lhs)
725    {
726      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
727	val = fold_convert (TREE_TYPE (lhs), val);
728      repl = gimple_build_assign (lhs, val);
729    }
730  else
731    repl = gimple_build_nop ();
732  tree vdef = gimple_vdef (stmt);
733  if (vdef && TREE_CODE (vdef) == SSA_NAME)
734    {
735      unlink_stmt_vdef (stmt);
736      release_ssa_name (vdef);
737    }
738  gsi_replace (gsi, repl, false);
739}
740
741/* Replace the call at *GSI with the new call REPL and fold that
742   again.  */
743
744static void
745replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
746{
747  gimple stmt = gsi_stmt (*gsi);
748  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
749  gimple_set_location (repl, gimple_location (stmt));
750  if (gimple_vdef (stmt)
751      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
752    {
753      gimple_set_vdef (repl, gimple_vdef (stmt));
754      gimple_set_vuse (repl, gimple_vuse (stmt));
755      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
756    }
757  gsi_replace (gsi, repl, false);
758  fold_stmt (gsi);
759}
760
761/* Return true if VAR is a VAR_DECL or a component thereof.  */
762
763static bool
764var_decl_component_p (tree var)
765{
766  tree inner = var;
767  while (handled_component_p (inner))
768    inner = TREE_OPERAND (inner, 0);
769  return SSA_VAR_P (inner);
770}
771
772/* Fold function call to builtin mem{{,p}cpy,move}.  Return
773   false if no simplification can be made.
774   If ENDP is 0, return DEST (like memcpy).
775   If ENDP is 1, return DEST+LEN (like mempcpy).
776   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
777   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
778   (memmove).   */
779
780static bool
781gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
782			       tree dest, tree src, int endp)
783{
784  gimple stmt = gsi_stmt (*gsi);
785  tree lhs = gimple_call_lhs (stmt);
786  tree len = gimple_call_arg (stmt, 2);
787  tree destvar, srcvar;
788  location_t loc = gimple_location (stmt);
789
790  /* If the LEN parameter is zero, return DEST.  */
791  if (integer_zerop (len))
792    {
793      gimple repl;
794      if (gimple_call_lhs (stmt))
795	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
796      else
797	repl = gimple_build_nop ();
798      tree vdef = gimple_vdef (stmt);
799      if (vdef && TREE_CODE (vdef) == SSA_NAME)
800	{
801	  unlink_stmt_vdef (stmt);
802	  release_ssa_name (vdef);
803	}
804      gsi_replace (gsi, repl, false);
805      return true;
806    }
807
808  /* If SRC and DEST are the same (and not volatile), return
809     DEST{,+LEN,+LEN-1}.  */
810  if (operand_equal_p (src, dest, 0))
811    {
812      unlink_stmt_vdef (stmt);
813      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
814	release_ssa_name (gimple_vdef (stmt));
815      if (!lhs)
816	{
817	  gsi_replace (gsi, gimple_build_nop (), false);
818	  return true;
819	}
820      goto done;
821    }
822  else
823    {
824      tree srctype, desttype;
825      unsigned int src_align, dest_align;
826      tree off0;
827
828      /* Inlining of memcpy/memmove may cause bounds lost (if we copy
829	 pointers as wide integer) and also may result in huge function
830	 size because of inlined bounds copy.  Thus don't inline for
831	 functions we want to instrument.  */
832      if (flag_check_pointer_bounds
833	  && chkp_instrumentable_p (cfun->decl)
834	  /* Even if data may contain pointers we can inline if copy
835	     less than a pointer size.  */
836	  && (!tree_fits_uhwi_p (len)
837	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
838	return false;
839
840      /* Build accesses at offset zero with a ref-all character type.  */
841      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
842							 ptr_mode, true), 0);
843
844      /* If we can perform the copy efficiently with first doing all loads
845         and then all stores inline it that way.  Currently efficiently
846	 means that we can load all the memory into a single integer
847	 register which is what MOVE_MAX gives us.  */
848      src_align = get_pointer_alignment (src);
849      dest_align = get_pointer_alignment (dest);
850      if (tree_fits_uhwi_p (len)
851	  && compare_tree_int (len, MOVE_MAX) <= 0
852	  /* ???  Don't transform copies from strings with known length this
853	     confuses the tree-ssa-strlen.c.  This doesn't handle
854	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
855	     reason.  */
856	  && !c_strlen (src, 2))
857	{
858	  unsigned ilen = tree_to_uhwi (len);
859	  if (exact_log2 (ilen) != -1)
860	    {
861	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
862	      if (type
863		  && TYPE_MODE (type) != BLKmode
864		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
865		      == ilen * 8)
866		  /* If the destination pointer is not aligned we must be able
867		     to emit an unaligned store.  */
868		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
869		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
870		{
871		  tree srctype = type;
872		  tree desttype = type;
873		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
874		    srctype = build_aligned_type (type, src_align);
875		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
876		  tree tem = fold_const_aggregate_ref (srcmem);
877		  if (tem)
878		    srcmem = tem;
879		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
880			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
881						     src_align))
882		    srcmem = NULL_TREE;
883		  if (srcmem)
884		    {
885		      gimple new_stmt;
886		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
887			{
888			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
889			  if (gimple_in_ssa_p (cfun))
890			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
891						    new_stmt);
892			  else
893			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
894			  gimple_assign_set_lhs (new_stmt, srcmem);
895			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
896			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
897			}
898		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
899			desttype = build_aligned_type (type, dest_align);
900		      new_stmt
901			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
902							    dest, off0),
903					       srcmem);
904		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
905		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
906		      if (gimple_vdef (new_stmt)
907			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
908			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
909		      if (!lhs)
910			{
911			  gsi_replace (gsi, new_stmt, false);
912			  return true;
913			}
914		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
915		      goto done;
916		    }
917		}
918	    }
919	}
920
921      if (endp == 3)
922	{
923	  /* Both DEST and SRC must be pointer types.
924	     ??? This is what old code did.  Is the testing for pointer types
925	     really mandatory?
926
927	     If either SRC is readonly or length is 1, we can use memcpy.  */
928	  if (!dest_align || !src_align)
929	    return false;
930	  if (readonly_data_expr (src)
931	      || (tree_fits_uhwi_p (len)
932		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
933		      >= tree_to_uhwi (len))))
934	    {
935	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
936	      if (!fn)
937		return false;
938	      gimple_call_set_fndecl (stmt, fn);
939	      gimple_call_set_arg (stmt, 0, dest);
940	      gimple_call_set_arg (stmt, 1, src);
941	      fold_stmt (gsi);
942	      return true;
943	    }
944
945	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
946	  if (TREE_CODE (src) == ADDR_EXPR
947	      && TREE_CODE (dest) == ADDR_EXPR)
948	    {
949	      tree src_base, dest_base, fn;
950	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
951	      HOST_WIDE_INT size = -1;
952	      HOST_WIDE_INT maxsize = -1;
953
954	      srcvar = TREE_OPERAND (src, 0);
955	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
956						  &size, &maxsize);
957	      destvar = TREE_OPERAND (dest, 0);
958	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
959						   &size, &maxsize);
960	      if (tree_fits_uhwi_p (len))
961		maxsize = tree_to_uhwi (len);
962	      else
963		maxsize = -1;
964	      src_offset /= BITS_PER_UNIT;
965	      dest_offset /= BITS_PER_UNIT;
966	      if (SSA_VAR_P (src_base)
967		  && SSA_VAR_P (dest_base))
968		{
969		  if (operand_equal_p (src_base, dest_base, 0)
970		      && ranges_overlap_p (src_offset, maxsize,
971					   dest_offset, maxsize))
972		    return false;
973		}
974	      else if (TREE_CODE (src_base) == MEM_REF
975		       && TREE_CODE (dest_base) == MEM_REF)
976		{
977		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
978					 TREE_OPERAND (dest_base, 0), 0))
979		    return false;
980		  offset_int off = mem_ref_offset (src_base) + src_offset;
981		  if (!wi::fits_shwi_p (off))
982		    return false;
983		  src_offset = off.to_shwi ();
984
985		  off = mem_ref_offset (dest_base) + dest_offset;
986		  if (!wi::fits_shwi_p (off))
987		    return false;
988		  dest_offset = off.to_shwi ();
989		  if (ranges_overlap_p (src_offset, maxsize,
990					dest_offset, maxsize))
991		    return false;
992		}
993	      else
994		return false;
995
996	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
997	      if (!fn)
998		return false;
999	      gimple_call_set_fndecl (stmt, fn);
1000	      gimple_call_set_arg (stmt, 0, dest);
1001	      gimple_call_set_arg (stmt, 1, src);
1002	      fold_stmt (gsi);
1003	      return true;
1004	    }
1005
1006	  /* If the destination and source do not alias optimize into
1007	     memcpy as well.  */
1008	  if ((is_gimple_min_invariant (dest)
1009	       || TREE_CODE (dest) == SSA_NAME)
1010	      && (is_gimple_min_invariant (src)
1011		  || TREE_CODE (src) == SSA_NAME))
1012	    {
1013	      ao_ref destr, srcr;
1014	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
1015	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
1016	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
1017		{
1018		  tree fn;
1019		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1020		  if (!fn)
1021		    return false;
1022		  gimple_call_set_fndecl (stmt, fn);
1023		  gimple_call_set_arg (stmt, 0, dest);
1024		  gimple_call_set_arg (stmt, 1, src);
1025		  fold_stmt (gsi);
1026		  return true;
1027		}
1028	    }
1029
1030	  return false;
1031	}
1032
1033      if (!tree_fits_shwi_p (len))
1034	return false;
1035      /* FIXME:
1036         This logic lose for arguments like (type *)malloc (sizeof (type)),
1037         since we strip the casts of up to VOID return value from malloc.
1038	 Perhaps we ought to inherit type from non-VOID argument here?  */
1039      STRIP_NOPS (src);
1040      STRIP_NOPS (dest);
1041      if (!POINTER_TYPE_P (TREE_TYPE (src))
1042	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
1043	return false;
1044      /* In the following try to find a type that is most natural to be
1045	 used for the memcpy source and destination and that allows
1046	 the most optimization when memcpy is turned into a plain assignment
1047	 using that type.  In theory we could always use a char[len] type
1048	 but that only gains us that the destination and source possibly
1049	 no longer will have their address taken.  */
1050      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
1051      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1052	{
1053	  tree tem = TREE_OPERAND (src, 0);
1054	  STRIP_NOPS (tem);
1055	  if (tem != TREE_OPERAND (src, 0))
1056	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1057	}
1058      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1059	{
1060	  tree tem = TREE_OPERAND (dest, 0);
1061	  STRIP_NOPS (tem);
1062	  if (tem != TREE_OPERAND (dest, 0))
1063	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1064	}
1065      srctype = TREE_TYPE (TREE_TYPE (src));
1066      if (TREE_CODE (srctype) == ARRAY_TYPE
1067	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1068	{
1069	  srctype = TREE_TYPE (srctype);
1070	  STRIP_NOPS (src);
1071	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1072	}
1073      desttype = TREE_TYPE (TREE_TYPE (dest));
1074      if (TREE_CODE (desttype) == ARRAY_TYPE
1075	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1076	{
1077	  desttype = TREE_TYPE (desttype);
1078	  STRIP_NOPS (dest);
1079	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1080	}
1081      if (TREE_ADDRESSABLE (srctype)
1082	  || TREE_ADDRESSABLE (desttype))
1083	return false;
1084
1085      /* Make sure we are not copying using a floating-point mode or
1086         a type whose size possibly does not match its precision.  */
1087      if (FLOAT_MODE_P (TYPE_MODE (desttype))
1088	  || TREE_CODE (desttype) == BOOLEAN_TYPE
1089	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
1090	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1091      if (FLOAT_MODE_P (TYPE_MODE (srctype))
1092	  || TREE_CODE (srctype) == BOOLEAN_TYPE
1093	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
1094	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1095      if (!srctype)
1096	srctype = desttype;
1097      if (!desttype)
1098	desttype = srctype;
1099      if (!srctype)
1100	return false;
1101
1102      src_align = get_pointer_alignment (src);
1103      dest_align = get_pointer_alignment (dest);
1104      if (dest_align < TYPE_ALIGN (desttype)
1105	  || src_align < TYPE_ALIGN (srctype))
1106	return false;
1107
1108      destvar = dest;
1109      STRIP_NOPS (destvar);
1110      if (TREE_CODE (destvar) == ADDR_EXPR
1111	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
1112	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1113	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1114      else
1115	destvar = NULL_TREE;
1116
1117      srcvar = src;
1118      STRIP_NOPS (srcvar);
1119      if (TREE_CODE (srcvar) == ADDR_EXPR
1120	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1121	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1122	{
1123	  if (!destvar
1124	      || src_align >= TYPE_ALIGN (desttype))
1125	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1126				  srcvar, off0);
1127	  else if (!STRICT_ALIGNMENT)
1128	    {
1129	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1130					    src_align);
1131	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1132	    }
1133	  else
1134	    srcvar = NULL_TREE;
1135	}
1136      else
1137	srcvar = NULL_TREE;
1138
1139      if (srcvar == NULL_TREE && destvar == NULL_TREE)
1140	return false;
1141
1142      if (srcvar == NULL_TREE)
1143	{
1144	  STRIP_NOPS (src);
1145	  if (src_align >= TYPE_ALIGN (desttype))
1146	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1147	  else
1148	    {
1149	      if (STRICT_ALIGNMENT)
1150		return false;
1151	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1152					    src_align);
1153	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1154	    }
1155	}
1156      else if (destvar == NULL_TREE)
1157	{
1158	  STRIP_NOPS (dest);
1159	  if (dest_align >= TYPE_ALIGN (srctype))
1160	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1161	  else
1162	    {
1163	      if (STRICT_ALIGNMENT)
1164		return false;
1165	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1166					     dest_align);
1167	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1168	    }
1169	}
1170
1171      gimple new_stmt;
1172      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1173	{
1174	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1175	  if (gimple_in_ssa_p (cfun))
1176	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1177	  else
1178	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1179	  gimple_assign_set_lhs (new_stmt, srcvar);
1180	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1181	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1182	}
1183      new_stmt = gimple_build_assign (destvar, srcvar);
1184      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1185      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1186      if (gimple_vdef (new_stmt)
1187	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1188	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1189      if (!lhs)
1190	{
1191	  gsi_replace (gsi, new_stmt, false);
1192	  return true;
1193	}
1194      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1195    }
1196
1197done:
1198  if (endp == 0 || endp == 3)
1199    len = NULL_TREE;
1200  else if (endp == 2)
1201    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1202			   ssize_int (1));
1203  if (endp == 2 || endp == 1)
1204    dest = fold_build_pointer_plus_loc (loc, dest, len);
1205
1206  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1207				   GSI_SAME_STMT);
1208  gimple repl = gimple_build_assign (lhs, dest);
1209  gsi_replace (gsi, repl, false);
1210  return true;
1211}
1212
1213/* Fold function call to builtin memset or bzero at *GSI setting the
1214   memory of size LEN to VAL.  Return whether a simplification was made.  */
1215
1216static bool
1217gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1218{
1219  gimple stmt = gsi_stmt (*gsi);
1220  tree etype;
1221  unsigned HOST_WIDE_INT length, cval;
1222
1223  /* If the LEN parameter is zero, return DEST.  */
1224  if (integer_zerop (len))
1225    {
1226      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1227      return true;
1228    }
1229
1230  if (! tree_fits_uhwi_p (len))
1231    return false;
1232
1233  if (TREE_CODE (c) != INTEGER_CST)
1234    return false;
1235
1236  tree dest = gimple_call_arg (stmt, 0);
1237  tree var = dest;
1238  if (TREE_CODE (var) != ADDR_EXPR)
1239    return false;
1240
1241  var = TREE_OPERAND (var, 0);
1242  if (TREE_THIS_VOLATILE (var))
1243    return false;
1244
1245  etype = TREE_TYPE (var);
1246  if (TREE_CODE (etype) == ARRAY_TYPE)
1247    etype = TREE_TYPE (etype);
1248
1249  if (!INTEGRAL_TYPE_P (etype)
1250      && !POINTER_TYPE_P (etype))
1251    return NULL_TREE;
1252
1253  if (! var_decl_component_p (var))
1254    return NULL_TREE;
1255
1256  length = tree_to_uhwi (len);
1257  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1258      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1259    return NULL_TREE;
1260
1261  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1262    return NULL_TREE;
1263
1264  if (integer_zerop (c))
1265    cval = 0;
1266  else
1267    {
1268      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1269	return NULL_TREE;
1270
1271      cval = TREE_INT_CST_LOW (c);
1272      cval &= 0xff;
1273      cval |= cval << 8;
1274      cval |= cval << 16;
1275      cval |= (cval << 31) << 1;
1276    }
1277
1278  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1279  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1280  gimple_set_vuse (store, gimple_vuse (stmt));
1281  tree vdef = gimple_vdef (stmt);
1282  if (vdef && TREE_CODE (vdef) == SSA_NAME)
1283    {
1284      gimple_set_vdef (store, gimple_vdef (stmt));
1285      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1286    }
1287  gsi_insert_before (gsi, store, GSI_SAME_STMT);
1288  if (gimple_call_lhs (stmt))
1289    {
1290      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1291      gsi_replace (gsi, asgn, false);
1292    }
1293  else
1294    {
1295      gimple_stmt_iterator gsi2 = *gsi;
1296      gsi_prev (gsi);
1297      gsi_remove (&gsi2, true);
1298    }
1299
1300  return true;
1301}
1302
1303
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* For TYPE == 2 ARG itself is the value: it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum lengths/values keep the larger of the two;
		 both have to be constants to be comparable.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For TYPE == 0 every determined length has to agree exactly.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* Both arms of a COND_EXPR must yield a usable result.  */
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
        }
        return true;

      default:
        return false;
    }
}
1429
1430tree
1431get_maxval_strlen (tree arg, int type)
1432{
1433  bitmap visited = NULL;
1434  tree len = NULL_TREE;
1435  if (!get_maxval_strlen (arg, &len, &visited, type))
1436    len = NULL_TREE;
1437  if (visited)
1438    BITMAP_FREE (visited);
1439
1440  return len;
1441}
1442
1443
/* Fold a call to builtin strcpy with arguments DEST and SRC at *GSI.
   Return true if the call was simplified (replaced by DEST or by an
   equivalent memcpy call), false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strcpy -> memcpy transform can enable larger inline expansion;
     avoid it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* A constant source string length is required to know how many bytes
     memcpy should move.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1481
/* Fold a call to builtin strncpy with arguments DEST, SRC, and LEN at
   *GSI.  Return true if the call was simplified (replaced by DEST or
   by an equivalent memcpy call), false if no simplification can be
   made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* Account for the terminating NUL byte as well.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1530
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call at *GSI.

   Return true if the call was simplified: either replaced by DST when
   SRC is the empty string, or split into strlen (DST) followed by a
   memcpy of SRC (including its NUL) to DST + strlen (DST).  Return
   false if no simplification was possible.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion is a size pessimization; only do it
     when optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating NUL is moved as well.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for a used lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1625
1626/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
1627   are the arguments to the call.  */
1628
1629static bool
1630gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1631{
1632  gimple stmt = gsi_stmt (*gsi);
1633  tree dest = gimple_call_arg (stmt, 0);
1634  tree src = gimple_call_arg (stmt, 1);
1635  tree size = gimple_call_arg (stmt, 2);
1636  tree fn;
1637  const char *p;
1638
1639
1640  p = c_getstr (src);
1641  /* If the SRC parameter is "", return DEST.  */
1642  if (p && *p == '\0')
1643    {
1644      replace_call_with_value (gsi, dest);
1645      return true;
1646    }
1647
1648  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1649    return false;
1650
1651  /* If __builtin_strcat_chk is used, assume strcat is available.  */
1652  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1653  if (!fn)
1654    return false;
1655
1656  gimple repl = gimple_build_call (fn, 2, dest, src);
1657  replace_call_with_call_and_fold (gsi, repl);
1658  return true;
1659}
1660
1661/* Simplify a call to the strncat builtin.  */
1662
1663static bool
1664gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1665{
1666  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1667  tree dst = gimple_call_arg (stmt, 0);
1668  tree src = gimple_call_arg (stmt, 1);
1669  tree len = gimple_call_arg (stmt, 2);
1670
1671  const char *p = c_getstr (src);
1672
1673  /* If the requested length is zero, or the src parameter string
1674     length is zero, return the dst parameter.  */
1675  if (integer_zerop (len) || (p && *p == '\0'))
1676    {
1677      replace_call_with_value (gsi, dst);
1678      return true;
1679    }
1680
1681  /* If the requested len is greater than or equal to the string
1682     length, call strcat.  */
1683  if (TREE_CODE (len) == INTEGER_CST && p
1684      && compare_tree_int (len, strlen (p)) >= 0)
1685    {
1686      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1687
1688      /* If the replacement _DECL isn't initialized, don't do the
1689	 transformation.  */
1690      if (!fn)
1691	return false;
1692
1693      gcall *repl = gimple_build_call (fn, 2, dst, src);
1694      replace_call_with_call_and_fold (gsi, repl);
1695      return true;
1696    }
1697
1698  return false;
1699}
1700
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Return true if the call at *GSI was simplified
   (into DEST, __strcat_chk or strncat), false otherwise.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* Need a known object size to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1756
/* Fold a call to the fputs builtin.  ARG0 (the string) and ARG1 (the
   stream) are the arguments to the call.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  Return true if the call at *GSI
   was simplified (deleted, turned into fputc, or turned into fwrite),
   false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1833
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin being folded.  Return true if
   the call was simplified (into DEST, DEST+LEN, or the corresponding
   unchecked mem* builtin), false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* A known object size is required to drop or keep the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on LEN when LEN itself isn't constant.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy could overflow the object: keep the checking call.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1933
/* Fold a call to the __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call and FCODE is the
   BUILT_IN_* code of the builtin.  SIZE is the destination object size
   bound (all-ones when unknown).  Returns true if the call was
   simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True if the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* Give up unless the object size bound is a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound for the length of SRC, or NULL_TREE when
     no bound is known.  */
  tree maxlen = get_maxval_strlen (src, 1);
  /* An all-ones SIZE means the object size is unknown, in which case
     the runtime check can never fail and can always be dropped.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes so the terminating NUL is included.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The string plus its NUL must fit, i.e. MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2024
2025/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
2026   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
2027   length passed as third argument. IGNORE is true if return value can be
2028   ignored. FCODE is the BUILT_IN_* code of the builtin. */
2029
2030static bool
2031gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2032				 tree dest, tree src,
2033				 tree len, tree size,
2034				 enum built_in_function fcode)
2035{
2036  gimple stmt = gsi_stmt (*gsi);
2037  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2038  tree fn;
2039
2040  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2041    {
2042       /* If return value of __stpncpy_chk is ignored,
2043          optimize into __strncpy_chk.  */
2044       fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2045       if (fn)
2046	 {
2047	   gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2048	   replace_call_with_call_and_fold (gsi, repl);
2049	   return true;
2050	 }
2051    }
2052
2053  if (! tree_fits_uhwi_p (size))
2054    return false;
2055
2056  tree maxlen = get_maxval_strlen (len, 2);
2057  if (! integer_all_onesp (size))
2058    {
2059      if (! tree_fits_uhwi_p (len))
2060	{
2061	  /* If LEN is not constant, try MAXLEN too.
2062	     For MAXLEN only allow optimizing into non-_ocs function
2063	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2064	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2065	    return false;
2066	}
2067      else
2068	maxlen = len;
2069
2070      if (tree_int_cst_lt (size, maxlen))
2071	return false;
2072    }
2073
2074  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
2075  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2076			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2077  if (!fn)
2078    return false;
2079
2080  gimple repl = gimple_build_call (fn, 3, dest, src, len);
2081  replace_call_with_call_and_fold (gsi, repl);
2082  return true;
2083}
2084
2085/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2086   Return NULL_TREE if no simplification can be made.  */
2087
2088static bool
2089gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2090{
2091  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2092  location_t loc = gimple_location (stmt);
2093  tree dest = gimple_call_arg (stmt, 0);
2094  tree src = gimple_call_arg (stmt, 1);
2095  tree fn, len, lenp1;
2096
2097  /* If the result is unused, replace stpcpy with strcpy.  */
2098  if (gimple_call_lhs (stmt) == NULL_TREE)
2099    {
2100      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2101      if (!fn)
2102	return false;
2103      gimple_call_set_fndecl (stmt, fn);
2104      fold_stmt (gsi);
2105      return true;
2106    }
2107
2108  len = c_strlen (src, 1);
2109  if (!len
2110      || TREE_CODE (len) != INTEGER_CST)
2111    return false;
2112
2113  if (optimize_function_for_size_p (cfun)
2114      /* If length is zero it's small enough.  */
2115      && !integer_zerop (len))
2116    return false;
2117
2118  /* If the source has a known length replace stpcpy with memcpy.  */
2119  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2120  if (!fn)
2121    return false;
2122
2123  gimple_seq stmts = NULL;
2124  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2125  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2126			tem, build_int_cst (size_type_node, 1));
2127  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2128  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2129  gimple_set_vuse (repl, gimple_vuse (stmt));
2130  gimple_set_vdef (repl, gimple_vdef (stmt));
2131  if (gimple_vdef (repl)
2132      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2133    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2134  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2135  /* Replace the result with dest + len.  */
2136  stmts = NULL;
2137  tem = gimple_convert (&stmts, loc, sizetype, len);
2138  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2139  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2140				      POINTER_PLUS_EXPR, dest, tem);
2141  gsi_replace (gsi, ret, false);
2142  /* Finally fold the memcpy call.  */
2143  gimple_stmt_iterator gsi2 = *gsi;
2144  gsi_prev (&gsi2);
2145  fold_stmt (&gsi2);
2146  return true;
2147}
2148
/* Fold a call to __{,v}snprintf_chk at *GSI into a plain {,v}snprintf
   call when the object-size check can be proven redundant.  FCODE is
   either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns
   true if the call was simplified.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* Give up unless the object size bound is a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means the object size is unknown, in which case
     the runtime check can never fail and can always be dropped.  */
  if (! integer_all_onesp (size))
    {
      /* MAXLEN is an upper bound for LEN, or NULL_TREE if unknown.  */
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2229
/* Fold a call to __{,v}sprintf_chk at *GSI into a plain {,v}sprintf
   call when the object-size check can be proven redundant.  FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true
   if the call was simplified.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* Give up unless the object size bound is a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN will be set below when the output length can be computed.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means the object size is unknown; otherwise the
     known output length (plus NUL) must fit, i.e. LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2325
/* Simplify a call to the sprintf builtin at *GSI.  The call has
   arguments DEST and FMT, plus optionally ORIG for a 3-argument call;
   calls with more than 3 arguments are not simplified.

   Returns true if the call was replaced (by strcpy plus, when the
   result is used, an assignment of the known output length), false
   if no simplification was possible.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is simply the format string's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* When the result is used we also need the length of ORIG to
	 substitute for the return value.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2450
/* Simplify a call to the snprintf builtin at *GSI.  The call has
   arguments DEST, DESTSIZE and FMT, plus optionally ORIG for a
   4-argument call; calls with more than 4 arguments are not
   simplified.

   Returns true if the call was replaced (by strcpy plus, when the
   result is used, an assignment of the known output length), false
   if no simplification was possible.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so we can
     prove the output fits without truncation.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, here the format string's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of ORIG must be a known constant to prove the
	 copy fits and to substitute for the return value.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2589
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
   builtins at *GSI.  FP, FMT, and ARG are the arguments to the call;
   calls with more than 3 arguments are not folded, and ARG may be
   null in the 2-argument case.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  Returns true if the call was replaced
   with fputs/fputc.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A %-free format with an extra argument is only meaningful
	 for the va_list variants.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2690
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk
   builtins at *GSI.  FMT and ARG are the arguments to the call; calls
   with more than 2 arguments are not folded, and ARG may be null in
   the 1-argument case.  FCODE is the BUILT_IN_* code of the function
   to be simplified.  Returns true if the call was replaced with
   puts/putchar.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle the "%s" case and the %-free case together: both reduce
     to printing a single known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2852
2853
2854
/* Fold a call to __builtin_strlen when the maximal length of its
   argument can be computed.  */
2857static bool
2858gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2859{
2860  gimple stmt = gsi_stmt (*gsi);
2861  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2862  if (!len)
2863    return false;
2864  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2865  replace_call_with_value (gsi, len);
2866  return true;
2867}
2868
2869
2870/* Fold the non-target builtin at *GSI and return whether any simplification
2871   was made.  */
2872
2873static bool
2874gimple_fold_builtin (gimple_stmt_iterator *gsi)
2875{
2876  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2877  tree callee = gimple_call_fndecl (stmt);
2878
2879  /* Give up for always_inline inline builtins until they are
2880     inlined.  */
2881  if (avoid_folding_inline_builtin (callee))
2882    return false;
2883
2884  unsigned n = gimple_call_num_args (stmt);
2885  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2886  switch (fcode)
2887    {
2888    case BUILT_IN_BZERO:
2889      return gimple_fold_builtin_memset (gsi, integer_zero_node,
2890					 gimple_call_arg (stmt, 1));
2891    case BUILT_IN_MEMSET:
2892      return gimple_fold_builtin_memset (gsi,
2893					 gimple_call_arg (stmt, 1),
2894					 gimple_call_arg (stmt, 2));
2895    case BUILT_IN_BCOPY:
2896      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2897					    gimple_call_arg (stmt, 0), 3);
2898    case BUILT_IN_MEMCPY:
2899      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2900					    gimple_call_arg (stmt, 1), 0);
2901    case BUILT_IN_MEMPCPY:
2902      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2903					    gimple_call_arg (stmt, 1), 1);
2904    case BUILT_IN_MEMMOVE:
2905      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2906					    gimple_call_arg (stmt, 1), 3);
2907    case BUILT_IN_SPRINTF_CHK:
2908    case BUILT_IN_VSPRINTF_CHK:
2909      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2910    case BUILT_IN_STRCAT_CHK:
2911      return gimple_fold_builtin_strcat_chk (gsi);
2912    case BUILT_IN_STRNCAT_CHK:
2913      return gimple_fold_builtin_strncat_chk (gsi);
2914    case BUILT_IN_STRLEN:
2915      return gimple_fold_builtin_strlen (gsi);
2916    case BUILT_IN_STRCPY:
2917      return gimple_fold_builtin_strcpy (gsi,
2918					 gimple_call_arg (stmt, 0),
2919					 gimple_call_arg (stmt, 1));
2920    case BUILT_IN_STRNCPY:
2921      return gimple_fold_builtin_strncpy (gsi,
2922					  gimple_call_arg (stmt, 0),
2923					  gimple_call_arg (stmt, 1),
2924					  gimple_call_arg (stmt, 2));
2925    case BUILT_IN_STRCAT:
2926      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2927					 gimple_call_arg (stmt, 1));
2928    case BUILT_IN_STRNCAT:
2929      return gimple_fold_builtin_strncat (gsi);
2930    case BUILT_IN_FPUTS:
2931      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2932					gimple_call_arg (stmt, 1), false);
2933    case BUILT_IN_FPUTS_UNLOCKED:
2934      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2935					gimple_call_arg (stmt, 1), true);
2936    case BUILT_IN_MEMCPY_CHK:
2937    case BUILT_IN_MEMPCPY_CHK:
2938    case BUILT_IN_MEMMOVE_CHK:
2939    case BUILT_IN_MEMSET_CHK:
2940      return gimple_fold_builtin_memory_chk (gsi,
2941					     gimple_call_arg (stmt, 0),
2942					     gimple_call_arg (stmt, 1),
2943					     gimple_call_arg (stmt, 2),
2944					     gimple_call_arg (stmt, 3),
2945					     fcode);
2946    case BUILT_IN_STPCPY:
2947      return gimple_fold_builtin_stpcpy (gsi);
2948    case BUILT_IN_STRCPY_CHK:
2949    case BUILT_IN_STPCPY_CHK:
2950      return gimple_fold_builtin_stxcpy_chk (gsi,
2951					     gimple_call_arg (stmt, 0),
2952					     gimple_call_arg (stmt, 1),
2953					     gimple_call_arg (stmt, 2),
2954					     fcode);
2955    case BUILT_IN_STRNCPY_CHK:
2956    case BUILT_IN_STPNCPY_CHK:
2957      return gimple_fold_builtin_stxncpy_chk (gsi,
2958					      gimple_call_arg (stmt, 0),
2959					      gimple_call_arg (stmt, 1),
2960					      gimple_call_arg (stmt, 2),
2961					      gimple_call_arg (stmt, 3),
2962					      fcode);
2963    case BUILT_IN_SNPRINTF_CHK:
2964    case BUILT_IN_VSNPRINTF_CHK:
2965      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2966    case BUILT_IN_SNPRINTF:
2967      return gimple_fold_builtin_snprintf (gsi);
2968    case BUILT_IN_SPRINTF:
2969      return gimple_fold_builtin_sprintf (gsi);
2970    case BUILT_IN_FPRINTF:
2971    case BUILT_IN_FPRINTF_UNLOCKED:
2972    case BUILT_IN_VFPRINTF:
2973      if (n == 2 || n == 3)
2974	return gimple_fold_builtin_fprintf (gsi,
2975					    gimple_call_arg (stmt, 0),
2976					    gimple_call_arg (stmt, 1),
2977					    n == 3
2978					    ? gimple_call_arg (stmt, 2)
2979					    : NULL_TREE,
2980					    fcode);
2981      break;
2982    case BUILT_IN_FPRINTF_CHK:
2983    case BUILT_IN_VFPRINTF_CHK:
2984      if (n == 3 || n == 4)
2985	return gimple_fold_builtin_fprintf (gsi,
2986					    gimple_call_arg (stmt, 0),
2987					    gimple_call_arg (stmt, 2),
2988					    n == 4
2989					    ? gimple_call_arg (stmt, 3)
2990					    : NULL_TREE,
2991					    fcode);
2992      break;
2993    case BUILT_IN_PRINTF:
2994    case BUILT_IN_PRINTF_UNLOCKED:
2995    case BUILT_IN_VPRINTF:
2996      if (n == 1 || n == 2)
2997	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2998					   n == 2
2999					   ? gimple_call_arg (stmt, 1)
3000					   : NULL_TREE, fcode);
3001      break;
3002    case BUILT_IN_PRINTF_CHK:
3003    case BUILT_IN_VPRINTF_CHK:
3004      if (n == 2 || n == 3)
3005	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3006					   n == 3
3007					   ? gimple_call_arg (stmt, 2)
3008					   : NULL_TREE, fcode);
3009    default:;
3010    }
3011
3012  /* Try the generic builtin folder.  */
3013  bool ignore = (gimple_call_lhs (stmt) == NULL);
3014  tree result = fold_call_stmt (stmt, ignore);
3015  if (result)
3016    {
3017      if (ignore)
3018	STRIP_NOPS (result);
3019      else
3020	result = fold_convert (gimple_call_return_type (stmt), result);
3021      if (!update_call_from_tree (gsi, result))
3022	gimplify_and_update_call_from_tree (gsi, result);
3023      return true;
3024    }
3025
3026  return false;
3027}
3028
3029/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3030   doesn't fit into TYPE.  The test for overflow should be regardless of
3031   -fwrapv, and even for unsigned types.  */
3032
3033bool
3034arith_overflowed_p (enum tree_code code, const_tree type,
3035		    const_tree arg0, const_tree arg1)
3036{
3037  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3038  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3039    widest2_int_cst;
3040  widest2_int warg0 = widest2_int_cst (arg0);
3041  widest2_int warg1 = widest2_int_cst (arg1);
3042  widest2_int wres;
3043  switch (code)
3044    {
3045    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3046    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3047    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3048    default: gcc_unreachable ();
3049    }
3050  signop sign = TYPE_SIGN (type);
3051  if (sign == UNSIGNED && wi::neg_p (wres))
3052    return true;
3053  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3054}
3055
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true only simplifications that keep the statement at GSI
   a call are performed; the statement is never replaced by one of a
   different kind.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolves to a known function; just
	     note in the dump when the type-inheritance analysis disagrees
	     with that resolution.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try to devirtualize: when the set of possible targets is
	     complete (FINAL) with at most one member, the call can be
	     folded to a direct call or to __builtin_unreachable.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
		 		   targets.length () == 1
		  		   ? targets[0]->name ()
		  		   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     anonymous default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call site is unreachable,
		     replace the call with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      /* Carry the virtual operands over to the
			 replacement statement.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions.  SUBCODE is the arithmetic operation
	 an overflow-checking internal function performs; CPLX_RESULT is
	 true for the *_OVERFLOW variants whose lhs is a complex value
	 of {result, overflow flag}.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the object size is unknown (all ones)
	     or is constant and provably large enough.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* For the complex-returning variants the computation type is
	     the element type of the lhs, not the argument type.  */
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      /* Constant result: re-check overflow against the
			 actual result type.  */
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion to TYPE is not value-preserving;
		       give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package result and overflow flag into the COMPLEX value
		 the *_OVERFLOW internal functions produce.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3347
3348
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* The condition simplified to a bare SSA name; test it
	   against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* The condition became constant; make the branch static.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplified expression into SEQ and test
	     its result against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In place we may only replace the rhs when the replacement does
	 not need more operands than the original rhs had.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      /* Other statement kinds can only be handled by replacing the
	 computation of their lhs with the sequence in SEQ.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3462
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; return true if *T
   was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR and any handled components so that T
     points at the innermost base reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component reference into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3545
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is passed down to gimple_simplify
   and controls whether SSA use-def edges are followed during
   simplification (see no_follow_ssa_edges).  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize addresses in call arguments and the lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement, the above may have replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In place the new rhs must not need more operands than the
	   statement currently has room for.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    /* Whether an input may be folded to a memory reference
	       depends on its constraint.  */
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &*p down to a reference and rebuild the address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* Re-fetch the statement, the above may have replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3787
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells the folder not to substitute anything
     for the SSA name, i.e. never look through SSA definitions.  */
  return NULL_TREE;
}
3795
3796/* Valueization callback that ends up following single-use SSA edges only.  */
3797
3798tree
3799follow_single_use_edges (tree val)
3800{
3801  if (TREE_CODE (val) == SSA_NAME
3802      && !has_single_use (val))
3803    return NULL_TREE;
3804  return val;
3805}
3806
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not look through SSA defs when valueizing operands.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3819
/* Like the single-argument fold_stmt above, but valueize operands via
   the caller-supplied VALUEIZE callback while folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3825
3826/* Perform the minimal folding on statement *GSI.  Only operations like
3827   *&x created by constant propagation are handled.  The statement cannot
3828   be replaced with a new one.  Return true if the statement was
3829   changed, false otherwise.
3830   The statement *GSI should be in valid gimple form but may
3831   be in unfolded state as resulting from for example constant propagation
3832   which can produce *&x = 0.  */
3833
3834bool
3835fold_stmt_inplace (gimple_stmt_iterator *gsi)
3836{
3837  gimple stmt = gsi_stmt (*gsi);
3838  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3839  gcc_assert (gsi_stmt (*gsi) == stmt);
3840  return changed;
3841}
3842
3843/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3844   if EXPR is null or we don't know how.
3845   If non-null, the result always has boolean type.  */
3846
3847static tree
3848canonicalize_bool (tree expr, bool invert)
3849{
3850  if (!expr)
3851    return NULL_TREE;
3852  else if (invert)
3853    {
3854      if (integer_nonzerop (expr))
3855	return boolean_false_node;
3856      else if (integer_zerop (expr))
3857	return boolean_true_node;
3858      else if (TREE_CODE (expr) == SSA_NAME)
3859	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3860			    build_int_cst (TREE_TYPE (expr), 0));
3861      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3862	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3863			    boolean_type_node,
3864			    TREE_OPERAND (expr, 0),
3865			    TREE_OPERAND (expr, 1));
3866      else
3867	return NULL_TREE;
3868    }
3869  else
3870    {
3871      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3872	return expr;
3873      if (integer_nonzerop (expr))
3874	return boolean_true_node;
3875      else if (integer_zerop (expr))
3876	return boolean_false_node;
3877      else if (TREE_CODE (expr) == SSA_NAME)
3878	return fold_build2 (NE_EXPR, boolean_type_node, expr,
3879			    build_int_cst (TREE_TYPE (expr), 0));
3880      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3881	return fold_build2 (TREE_CODE (expr),
3882			    boolean_type_node,
3883			    TREE_OPERAND (expr, 0),
3884			    TREE_OPERAND (expr, 1));
3885      else
3886	return NULL_TREE;
3887    }
3888}
3889
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to (NAME != 0) and to (NAME == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise see whether NAME is defined as exactly (OP1 CODE OP2).  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) are both just NAME: recurse on
	     NAME's defining comparison unchanged.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) and (name != 1) invert NAME's defining
	     comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3947
3948/* Check to see if two boolean expressions OP1 and OP2 are logically
3949   equivalent.  */
3950
3951static bool
3952same_bool_result_p (const_tree op1, const_tree op2)
3953{
3954  /* Simple cases first.  */
3955  if (operand_equal_p (op1, op2, 0))
3956    return true;
3957
3958  /* Check the cases where at least one of the operands is a comparison.
3959     These are a bit smarter than operand_equal_p in that they apply some
3960     identifies on SSA_NAMEs.  */
3961  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3962      && same_bool_comparison_p (op1, TREE_CODE (op2),
3963				 TREE_OPERAND (op2, 0),
3964				 TREE_OPERAND (op2, 1)))
3965    return true;
3966  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3967      && same_bool_comparison_p (op2, TREE_CODE (op1),
3968				 TREE_OPERAND (op1, 0),
3969				 TREE_OPERAND (op1, 1)))
3970    return true;
3971
3972  /* Default case.  */
3973  return false;
3974}
3975
/* Forward declarations for some mutually recursive functions.  The
   and_* and or_* simplifiers below call into each other (e.g. via
   DeMorgan's law when handling inverted variables), so they are all
   declared up front.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
3996
3997/* Helper function for and_comparisons_1:  try to simplify the AND of the
3998   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3999   If INVERT is true, invert the value of the VAR before doing the AND.
4000   Return NULL_EXPR if we can't simplify this to a single expression.  */
4001
4002static tree
4003and_var_with_comparison (tree var, bool invert,
4004			 enum tree_code code2, tree op2a, tree op2b)
4005{
4006  tree t;
4007  gimple stmt = SSA_NAME_DEF_STMT (var);
4008
4009  /* We can only deal with variables whose definitions are assignments.  */
4010  if (!is_gimple_assign (stmt))
4011    return NULL_TREE;
4012
4013  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4014     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4015     Then we only have to consider the simpler non-inverted cases.  */
4016  if (invert)
4017    t = or_var_with_comparison_1 (stmt,
4018				  invert_tree_comparison (code2, false),
4019				  op2a, op2b);
4020  else
4021    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4022  return canonicalize_bool (t, invert);
4023}
4024
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (op2a code2 op2b) is a test of a boolean SSA name for truth or
     falsehood, the name is recorded here so the identities below fire.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a != 0) or (op2a == 1) is just op2a.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a == 0) or (op2a != 1) is !op2a.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* Simplification of (inner1 AND (op2a code2 op2b)), if any, saved
	 for combination with the inner2 result.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4187
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constants: <0, 0 or >0 for op1b <, == or
	 > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      /* No simplification known; fall back to NULL_TREE below.  */
	      val = false;
	    }
	  /* If the NE test is implied by the other one, the other one
	     is the whole result.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) mean the first operand is !NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This arg makes the AND false; consistent with
			     a previous false result only.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4441
4442/* Try to simplify the AND of two comparisons, specified by
4443   (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4444   If this can be simplified to a single expression (without requiring
4445   introducing more SSA variables to hold intermediate values),
4446   return the resulting tree.  Otherwise return NULL_TREE.
4447   If the result expression is non-null, it has boolean type.  */
4448
4449tree
4450maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4451			    enum tree_code code2, tree op2a, tree op2b)
4452{
4453  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4454  if (t)
4455    return t;
4456  else
4457    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4458}
4459
4460/* Helper function for or_comparisons_1:  try to simplify the OR of the
4461   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4462   If INVERT is true, invert the value of VAR before doing the OR.
4463   Return NULL_EXPR if we can't simplify this to a single expression.  */
4464
4465static tree
4466or_var_with_comparison (tree var, bool invert,
4467			enum tree_code code2, tree op2a, tree op2b)
4468{
4469  tree t;
4470  gimple stmt = SSA_NAME_DEF_STMT (var);
4471
4472  /* We can only deal with variables whose definitions are assignments.  */
4473  if (!is_gimple_assign (stmt))
4474    return NULL_TREE;
4475
4476  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4477     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4478     Then we only have to consider the simpler non-inverted cases.  */
4479  if (invert)
4480    t = and_var_with_comparison_1 (stmt,
4481				   invert_tree_comparison (code2, false),
4482				   op2a, op2b);
4483  else
4484    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4485  return canonicalize_bool (t, invert);
4486}
4487
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (op2a code2 op2b) is a test of a boolean SSA name for truth or
     falsehood, the name is recorded here so the identities below fire.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a != 0) or (op2a == 1) is just op2a.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a == 0) or (op2a != 1) is !op2a.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* Simplification of (inner1 OR (op2a code2 op2b)), if any, saved
	 for combination with the inner2 result.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4651
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: negative when op1b < op2b, zero
	 when they are equal, positive when op1b > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* VAL is whether op1b itself satisfies (op1b CODE2 op2b); if so
	     the two ranges together cover every value and the result is
	     always true.  */
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  /* VAL is whether op1b satisfies (op1b CODE2 op2b), i.e. whether
	     the equality test's single value is already inside the second
	     comparison's range, making the equality test redundant.  */
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges:
	 (x < C) OR (x > C) simplifies to (x != C).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME's logical
	 negation, i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4905
4906/* Try to simplify the OR of two comparisons, specified by
4907   (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4908   If this can be simplified to a single expression (without requiring
4909   introducing more SSA variables to hold intermediate values),
4910   return the resulting tree.  Otherwise return NULL_TREE.
4911   If the result expression is non-null, it has boolean type.  */
4912
4913tree
4914maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4915			   enum tree_code code2, tree op2a, tree op2b)
4916{
4917  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4918  if (t)
4919    return t;
4920  else
4921    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4922}
4923
4924
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      /* The match-and-simplify machinery reduced the statement to a
	 single GIMPLE value; use it directly.  */
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* Fold a complete vector CONSTRUCTOR whose valueized elements
		 are all constants into a VECTOR_CST.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

              if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Substitute a constant address into the MEM_REF so
			 the aggregate folder below can look it up.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
	    /* Unary RHSs are not constant-folded here.  */
	    return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
	         further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

              return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

              return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Map the UBSAN overflow-checking internal functions to the
	       arithmetic they guard; other internal calls are not folded.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    /* Only accept a constant that did not overflow, since the
	       call exists precisely to detect overflow.  */
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	/* Fold calls to built-in functions with valueized arguments.  */
	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					 gimple_call_return_type (call_stmt),
					 fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5214
5215/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5216   Returns NULL_TREE if folding to a constant is not possible, otherwise
5217   returns a constant according to is_gimple_min_invariant.  */
5218
5219tree
5220gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5221{
5222  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5223  if (res && is_gimple_min_invariant (res))
5224    return res;
5225  return NULL_TREE;
5226}
5227
5228
5229/* The following set of functions are supposed to fold references using
5230   their constant initializers.  */
5231
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET, then strip
	 the address operand so we can look at the underlying object.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only references with a known constant position and size let us
	 adjust BIT_OFFSET reliably.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
5299
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  ACCESS_INDEX is the index of
     the element containing OFFSET, expressed in the domain's index type
     (including the domain's low bound).  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* INDEX starts one below the low bound so the first elt without an
     explicit index gets index LOW_BOUND after the increment below.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5399
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A flexible array
	 member (NULL field_size) extends to the end of the object.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5463
5464/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5465   to the memory at bit OFFSET.  */
5466
5467tree
5468fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5469		     unsigned HOST_WIDE_INT size, tree from_decl)
5470{
5471  tree ret;
5472
5473  /* We found the field with exact match.  */
5474  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5475      && !offset)
5476    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5477
5478  /* We are at the end of walk, see if we can view convert the
5479     result.  */
5480  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5481      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
5482      && !compare_tree_int (TYPE_SIZE (type), size)
5483      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5484    {
5485      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5486      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5487      if (ret)
5488	STRIP_USELESS_TYPE_CONVERSION (ret);
5489      return ret;
5490    }
5491  /* For constants and byte-aligned/sized reads try to go through
5492     native_encode/interpret.  */
5493  if (CONSTANT_CLASS_P (ctor)
5494      && BITS_PER_UNIT == 8
5495      && offset % BITS_PER_UNIT == 0
5496      && size % BITS_PER_UNIT == 0
5497      && size <= MAX_BITSIZE_MODE_ANY_MODE)
5498    {
5499      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5500      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5501			      offset / BITS_PER_UNIT) > 0)
5502	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5503    }
5504  if (TREE_CODE (ctor) == CONSTRUCTOR)
5505    {
5506
5507      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5508	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5509	return fold_array_ctor_reference (type, ctor, offset, size,
5510					  from_decl);
5511      else
5512	return fold_nonarray_ctor_reference (type, ctor, offset, size,
5513					     from_decl);
5514    }
5515
5516  return NULL_TREE;
5517}
5518
5519/* Return the tree representing the element referenced by T if T is an
5520   ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5521   names using VALUEIZE.  Return NULL_TREE otherwise.  */
5522
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Volatile accesses must not be replaced by their constant value.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A plain declaration folds to its constant initializer, if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads from constant strings fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Index relative to the array's lower bound, sign-extended
		 to the precision of the index type.  */
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Recurse on the complex operand; if it folds to a COMPLEX_CST
	   the real/imaginary part folds trivially.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
			      TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5635
/* Like fold_const_aggregate_ref_1, but without an SSA name valueization
   callback.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
5641
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   If CAN_REFER is non-NULL, set it to false if the method is not
   referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
5647
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* SIZE is the size in bits of one vtable entry.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array. */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5752
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   If CAN_REFER is non-NULL, set it to false if the method is not
   referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
5760
5761tree
5762gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5763				  bool *can_refer)
5764{
5765  unsigned HOST_WIDE_INT offset;
5766  tree v;
5767
5768  v = BINFO_VTABLE (known_binfo);
5769  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
5770  if (!v)
5771    return NULL_TREE;
5772
5773  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5774    {
5775      if (can_refer)
5776	*can_refer = false;
5777      return NULL_TREE;
5778    }
5779  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5780}
5781
5782/* Return true iff VAL is a gimple expression that is known to be
5783   non-negative.  Restricted to floating-point inputs.  */
5784
bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  /* Only floating-point VALs are supported.  */
  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fallthru to the conservative answer.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Built-ins whose mathematical result is nonnegative by
	       definition.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  /* Check that N is even and that the real constant is
		     exactly the integer N (not merely truncating to it).  */
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
5912
5913/* Given a pointer value OP0, return a simplified version of an
5914   indirection through OP0, or NULL_TREE if no simplification is
5915   possible.  Note that the resulting type may be different from
5916   the type pointed to in the sense that it is still compatible
5917   from the langhooks point of view. */
5918
tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 /* Only fold if the array's lower bound is constant.  */
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* Only fold when the access lies inside the vector.  */
          if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must be exactly one element, i.e. the imaginary
	     part.  */
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the dereference of the array pointer.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      /* Only fold if the array's lower bound is constant.  */
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6032
6033/* Return true if CODE is an operation that when operating on signed
6034   integer types involves undefined behavior on overflow and the
6035   operation can be expressed with unsigned arithmetic.  */
6036
6037bool
6038arith_code_with_undefined_signed_overflow (tree_code code)
6039{
6040  switch (code)
6041    {
6042    case PLUS_EXPR:
6043    case MINUS_EXPR:
6044    case MULT_EXPR:
6045    case NEGATE_EXPR:
6046    case POINTER_PLUS_EXPR:
6047      return true;
6048    default:
6049      return false;
6050    }
6051}
6052
6053/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6054   operation that can be transformed to unsigned arithmetic by converting
6055   its operand, carrying out the operation in the corresponding unsigned
6056   type and converting the result back to the original type.
6057
6058   Returns a sequence of statements that replace STMT and also contain
6059   a modified form of STMT itself.  */
6060
gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT to the corresponding unsigned type,
     collecting the conversion statements.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Perform the operation itself in the unsigned type.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR becomes plain PLUS_EXPR on the unsigned type.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Finally convert the unsigned result back to the original type.  */
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6092
6093
6094/* Build the expression CODE OP0 of type TYPE with location LOC,
6095   simplifying it first if possible using VALUEIZE if not NULL.
6096   OP0 is expected to be valueized already.  Returns the built
6097   expression value and appends statements possibly defining it
6098   to SEQ.  */
6099
6100tree
6101gimple_build (gimple_seq *seq, location_t loc,
6102	      enum tree_code code, tree type, tree op0,
6103	      tree (*valueize)(tree))
6104{
6105  tree res = gimple_simplify (code, type, op0, seq, valueize);
6106  if (!res)
6107    {
6108      if (gimple_in_ssa_p (cfun))
6109	res = make_ssa_name (type);
6110      else
6111	res = create_tmp_reg (type);
6112      gimple stmt;
6113      if (code == REALPART_EXPR
6114	  || code == IMAGPART_EXPR
6115	  || code == VIEW_CONVERT_EXPR)
6116	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6117      else
6118	stmt = gimple_build_assign (res, code, op0);
6119      gimple_set_location (stmt, loc);
6120      gimple_seq_add_stmt_without_update (seq, stmt);
6121    }
6122  return res;
6123}
6124
6125/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6126   simplifying it first if possible using VALUEIZE if not NULL.
6127   OP0 and OP1 are expected to be valueized already.  Returns the built
6128   expression value and appends statements possibly defining it
6129   to SEQ.  */
6130
6131tree
6132gimple_build (gimple_seq *seq, location_t loc,
6133	      enum tree_code code, tree type, tree op0, tree op1,
6134	      tree (*valueize)(tree))
6135{
6136  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
6137  if (!res)
6138    {
6139      if (gimple_in_ssa_p (cfun))
6140	res = make_ssa_name (type);
6141      else
6142	res = create_tmp_reg (type);
6143      gimple stmt = gimple_build_assign (res, code, op0, op1);
6144      gimple_set_location (stmt, loc);
6145      gimple_seq_add_stmt_without_update (seq, stmt);
6146    }
6147  return res;
6148}
6149
6150/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6151   simplifying it first if possible using VALUEIZE if not NULL.
6152   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
6153   expression value and appends statements possibly defining it
6154   to SEQ.  */
6155
6156tree
6157gimple_build (gimple_seq *seq, location_t loc,
6158	      enum tree_code code, tree type, tree op0, tree op1, tree op2,
6159	      tree (*valueize)(tree))
6160{
6161  tree res = gimple_simplify (code, type, op0, op1, op2,
6162			      seq, valueize);
6163  if (!res)
6164    {
6165      if (gimple_in_ssa_p (cfun))
6166	res = make_ssa_name (type);
6167      else
6168	res = create_tmp_reg (type);
6169      gimple stmt;
6170      if (code == BIT_FIELD_REF)
6171	stmt = gimple_build_assign (res, code,
6172				    build3 (code, type, op0, op1, op2));
6173      else
6174	stmt = gimple_build_assign (res, code, op0, op1, op2);
6175      gimple_set_location (stmt, loc);
6176      gimple_seq_add_stmt_without_update (seq, stmt);
6177    }
6178  return res;
6179}
6180
6181/* Build the call FN (ARG0) with a result of type TYPE
6182   (or no result if TYPE is void) with location LOC,
6183   simplifying it first if possible using VALUEIZE if not NULL.
6184   ARG0 is expected to be valueized already.  Returns the built
6185   expression value (or NULL_TREE if TYPE is void) and appends
6186   statements possibly defining it to SEQ.  */
6187
6188tree
6189gimple_build (gimple_seq *seq, location_t loc,
6190	      enum built_in_function fn, tree type, tree arg0,
6191	      tree (*valueize)(tree))
6192{
6193  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
6194  if (!res)
6195    {
6196      tree decl = builtin_decl_implicit (fn);
6197      gimple stmt = gimple_build_call (decl, 1, arg0);
6198      if (!VOID_TYPE_P (type))
6199	{
6200	  if (gimple_in_ssa_p (cfun))
6201	    res = make_ssa_name (type);
6202	  else
6203	    res = create_tmp_reg (type);
6204	  gimple_call_set_lhs (stmt, res);
6205	}
6206      gimple_set_location (stmt, loc);
6207      gimple_seq_add_stmt_without_update (seq, stmt);
6208    }
6209  return res;
6210}
6211
6212/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6213   (or no result if TYPE is void) with location LOC,
6214   simplifying it first if possible using VALUEIZE if not NULL.
6215   ARG0 is expected to be valueized already.  Returns the built
6216   expression value (or NULL_TREE if TYPE is void) and appends
6217   statements possibly defining it to SEQ.  */
6218
6219tree
6220gimple_build (gimple_seq *seq, location_t loc,
6221	      enum built_in_function fn, tree type, tree arg0, tree arg1,
6222	      tree (*valueize)(tree))
6223{
6224  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
6225  if (!res)
6226    {
6227      tree decl = builtin_decl_implicit (fn);
6228      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6229      if (!VOID_TYPE_P (type))
6230	{
6231	  if (gimple_in_ssa_p (cfun))
6232	    res = make_ssa_name (type);
6233	  else
6234	    res = create_tmp_reg (type);
6235	  gimple_call_set_lhs (stmt, res);
6236	}
6237      gimple_set_location (stmt, loc);
6238      gimple_seq_add_stmt_without_update (seq, stmt);
6239    }
6240  return res;
6241}
6242
6243/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6244   (or no result if TYPE is void) with location LOC,
6245   simplifying it first if possible using VALUEIZE if not NULL.
6246   ARG0 is expected to be valueized already.  Returns the built
6247   expression value (or NULL_TREE if TYPE is void) and appends
6248   statements possibly defining it to SEQ.  */
6249
6250tree
6251gimple_build (gimple_seq *seq, location_t loc,
6252	      enum built_in_function fn, tree type,
6253	      tree arg0, tree arg1, tree arg2,
6254	      tree (*valueize)(tree))
6255{
6256  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
6257  if (!res)
6258    {
6259      tree decl = builtin_decl_implicit (fn);
6260      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6261      if (!VOID_TYPE_P (type))
6262	{
6263	  if (gimple_in_ssa_p (cfun))
6264	    res = make_ssa_name (type);
6265	  else
6266	    res = create_tmp_reg (type);
6267	  gimple_call_set_lhs (stmt, res);
6268	}
6269      gimple_set_location (stmt, loc);
6270      gimple_seq_add_stmt_without_update (seq, stmt);
6271    }
6272  return res;
6273}
6274
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */
6280
6281tree
6282gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6283{
6284  if (useless_type_conversion_p (type, TREE_TYPE (op)))
6285    return op;
6286  return gimple_build (seq, loc, NOP_EXPR, type, op);
6287}
6288