/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
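
/* A minimal usage sketch for the macro above (illustrative only, not
   part of GCC; compiled out): with a wrapping two's complement
   HOST_WIDE_INT, adding two large positive values flips the sign of
   the sum, which the macro reports as overflow.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;			/* same sign as A */
  HOST_WIDE_INT sum = a + b;		/* wraps into the sign bit */

  /* A and B agree in sign but SUM differs, so this yields 1.  */
  gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));
}
#endif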

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
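
/* A round-trip sketch (illustrative only, compiled out): encode splits
   a two-word value into four half-words and decode reassembles it
   exactly, on the usual two's complement host.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;

  encode (words, 0x12345678, -1);
  /* words[0] and words[1] hold the halves of the low word,
     words[2] and words[3] the halves of the high word.  */
  decode (words, &low, &high);
  gcc_assert (low == 0x12345678 && high == -1);
}
#endif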

/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  OVERFLOWED_CONST
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
	OVERFLOWED_CONST is nonzero
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
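
/* A usage sketch (illustrative only, compiled out; CHAR_TYPE is a
   hypothetical signed 8-bit type passed in by the caller): truncating
   0x1ff to 8 bits clears the excess bit, sign extends to -1, and with
   OVERFLOWABLE > 0 on this signed type marks the result as having
   overflowed.  */
#if 0
static tree
force_fit_type_example (tree char_type)
{
  tree t = build_int_cst_wide (char_type, 0x1ff, 0);
  t = force_fit_type (t, 1, false, false);
  /* T is now -1 with TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW set.  */
  return t;
}
#endif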

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
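
/* A signed overflow sketch (illustrative only, compiled out): adding 1
   to the most positive doubleword value wraps to the most negative
   one, and OVERFLOW_SUM_SIGN reports it.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT max_hi
    = (HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) 1
			<< (HOST_BITS_PER_WIDE_INT - 1)) - 1);
  int ovf;

  /* Most positive doubleword value, plus 1.  */
  ovf = add_double_with_sign (~(unsigned HOST_WIDE_INT) 0, max_hi,
			      1, 0, &lv, &hv, false);
  /* H1 and H2 agree in sign but the sum's high word is negative.  */
  gcc_assert (ovf && hv < 0 && lv == 0);
}
#endif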

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
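
/* An overflow sketch (illustrative only, compiled out): the most
   negative doubleword value is the one input whose negation cannot be
   represented; it negates to itself and the sign test fires.  */
#if 0
static void
neg_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT min_hi
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
		       << (HOST_BITS_PER_WIDE_INT - 1));

  /* -(0, MIN_HI) is (0, MIN_HI) again; the sign bit survives.  */
  gcc_assert (neg_double (0, min_hi, &lv, &hv) && hv == min_hi);
}
#endif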

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
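
/* A quick sketch (illustrative only, compiled out): with W standing
   for HOST_BITS_PER_WIDE_INT, (2^W) * (2^W) = 2^(2W) needs 2W + 1
   bits, so it cannot fit in the 2W-bit doubleword result and the
   unsigned check sees a nonzero top half.  */
#if 0
static void
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = mul_double_with_sign (0, 1, 0, 1, &lv, &hv, true);

  gcc_assert (ovf && lv == 0 && hv == 0);
}
#endif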

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
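
/* A shift sketch (illustrative only, compiled out): shifting 1 left by
   W places, where W is HOST_BITS_PER_WIDE_INT, moves the bit into the
   low end of the high word, assuming PREC is the full 2*W.  */
#if 0
static void
lshift_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT,
		 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 0);
  gcc_assert (lv == 0 && hv == 1);
}
#endif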

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
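
/* A rotation sketch (illustrative only, compiled out): with PREC equal
   to 2*W, rotating right by one place moves the low-order bit to the
   top of the high word; a rotation is just the two opposite logical
   shifts ORed together.  */
#if 0
static void
rrotate_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  rrotate_double (1, 0, 1, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv);
  gcc_assert (lv == 0
	      && ((unsigned HOST_WIDE_INT) hv
		  == (unsigned HOST_WIDE_INT) 1
		     << (HOST_BITS_PER_WIDE_INT - 1)));
}
#endif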

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
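
/* A rounding sketch (illustrative only, compiled out): dividing -7 by
   2 under the different codes, TRUNC_DIV_EXPR gives -3, FLOOR_DIV_EXPR
   -4, CEIL_DIV_EXPR -3, and ROUND_DIV_EXPR -4 (since 2 * |rem| >= 2
   rounds the tie away from zero).  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, -1, 2, 0,
			&lquo, &hquo, &lrem, &hrem);
  /* Quotient is -4 and remainder 1, so that -4 * 2 + 1 == -7.  */
  gcc_assert (hquo == -1 && lquo == (unsigned HOST_WIDE_INT) -4
	      && hrem == 0 && lrem == 1);
}
#endif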

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
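
/* A usage sketch (illustrative only, compiled out; INT_TYPE is a
   hypothetical integer type supplied by the caller): 12 / 4 divides
   evenly, so the quotient 3 comes back as a tree, whereas 12 / 5
   would yield NULL_TREE.  */
#if 0
static tree
div_if_zero_remainder_example (tree int_type)
{
  tree twelve = build_int_cst (int_type, 12);
  tree four = build_int_cst (int_type, 4);

  return div_if_zero_remainder (EXACT_DIV_EXPR, twelve, four);
}
#endif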

/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
    locus = input_location;
  else
    locus = EXPR_LOCATION (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
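
/* A deferral sketch (illustrative only, compiled out): wrap a call to
   fold () so that any "assuming signed overflow does not occur"
   warning is issued only if folding actually changed something, and
   is associated with STMT for location purposes.  */
#if 0
static tree
fold_with_deferred_warnings (tree expr, tree stmt)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  /* Issue the deferred warning (if any) only when the result is
     used; passing 0 for CODE means use the deferred level.  */
  fold_undefer_overflow_warnings (folded != expr, stmt, 0);
  return folded;
}
#endif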

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, "%s", gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
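
/* A boundary sketch (illustrative only, compiled out; INT32_TYPE is a
   hypothetical 32-bit signed type supplied by the caller): the only
   value that cannot be negated is the type's minimum, whose low bits
   are exactly 1 << (prec - 1).  */
#if 0
static void
may_negate_example (tree int32_type)
{
  tree int_min = TYPE_MIN_VALUE (int32_type);
  tree minus_one = build_int_cst (int32_type, -1);

  gcc_assert (!may_negate_without_overflow_p (int_min)
	      && may_negate_without_overflow_p (minus_one));
}
#endif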

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return tem;
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
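
/* A decomposition sketch (illustrative only, compiled out; X_PLUS_3
   is a hypothetical PLUS_EXPR tree for X + 3): splitting it stores
   the literal 3 in *LITP, leaves *CONP and *MINUS_LITP null, and
   returns X as the variable part.  */
#if 0
static tree
split_tree_example (tree x_plus_3)
{
  tree conp, litp, minus_litp;
  tree var = split_tree (x_plus_3, PLUS_EXPR, &conp, &litp,
			 &minus_litp, 0);

  /* For X + 3: VAR == X, *LITP == 3, the other parts are null.  */
  return var;
}
#endif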

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
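
/* A folding sketch (illustrative only, compiled out; INT_TYPE is a
   hypothetical integer type supplied by the caller): combining the
   INTEGER_CSTs 6 and 7 under MULT_EXPR yields the constant 42 with
   no overflow flags set.  */
#if 0
static tree
int_const_binop_example (tree int_type)
{
  tree six = build_int_cst (int_type, 6);
  tree seven = build_int_cst (int_type, 7);

  return int_const_binop (MULT_EXPR, six, seven, 0);
}
#endif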
1695
1696/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1697   constant.  We assume ARG1 and ARG2 have the same data type, or at least
1698   are the same kind of constant and the same machine mode.  Return zero if
1699   combining the constants is not allowed in the current operating mode.
1700
1701   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1702
1703static tree
1704const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1705{
1706  /* Sanity check for the recursive cases.  */
1707  if (!arg1 || !arg2)
1708    return NULL_TREE;
1709
1710  STRIP_NOPS (arg1);
1711  STRIP_NOPS (arg2);
1712
1713  if (TREE_CODE (arg1) == INTEGER_CST)
1714    return int_const_binop (code, arg1, arg2, notrunc);
1715
1716  if (TREE_CODE (arg1) == REAL_CST)
1717    {
1718      enum machine_mode mode;
1719      REAL_VALUE_TYPE d1;
1720      REAL_VALUE_TYPE d2;
1721      REAL_VALUE_TYPE value;
1722      REAL_VALUE_TYPE result;
1723      bool inexact;
1724      tree t, type;
1725
1726      /* The following codes are handled by real_arithmetic.  */
1727      switch (code)
1728	{
1729	case PLUS_EXPR:
1730	case MINUS_EXPR:
1731	case MULT_EXPR:
1732	case RDIV_EXPR:
1733	case MIN_EXPR:
1734	case MAX_EXPR:
1735	  break;
1736
1737	default:
1738	  return NULL_TREE;
1739	}
1740
1741      d1 = TREE_REAL_CST (arg1);
1742      d2 = TREE_REAL_CST (arg2);
1743
1744      type = TREE_TYPE (arg1);
1745      mode = TYPE_MODE (type);
1746
1747      /* Don't perform the operation if we honor signaling NaNs and
1748	 either operand is a NaN.  */
1749      if (HONOR_SNANS (mode)
1750	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1751	return NULL_TREE;
1752
1753      /* Don't perform the operation if it would raise a division
1754	 by zero exception.  */
1755      if (code == RDIV_EXPR
1756	  && REAL_VALUES_EQUAL (d2, dconst0)
1757	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1758	return NULL_TREE;
1759
1760      /* If either operand is a NaN, just return that operand as the
1761	 result of the fold.  */
1762      if (REAL_VALUE_ISNAN (d1))
1763	return arg1;
1764      else if (REAL_VALUE_ISNAN (d2))
1765	return arg2;
1766
1767      inexact = real_arithmetic (&value, code, &d1, &d2);
1768      real_convert (&result, mode, &value);
1769
1770      /* Don't constant fold this floating point operation if
1771	 the result has overflowed and flag_trapping_math is set.  */
1772      if (flag_trapping_math
1773	  && MODE_HAS_INFINITIES (mode)
1774	  && REAL_VALUE_ISINF (result)
1775	  && !REAL_VALUE_ISINF (d1)
1776	  && !REAL_VALUE_ISINF (d2))
1777	return NULL_TREE;
1778
1779      /* Don't constant fold this floating point operation if the
1780	 result may depend upon the run-time rounding mode and
1781	 flag_rounding_math is set, or if GCC's software emulation
1782	 is unable to accurately represent the result.  */
1783      if ((flag_rounding_math
1784	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1785	       && !flag_unsafe_math_optimizations))
1786	  && (inexact || !real_identical (&result, &value)))
1787	return NULL_TREE;
1788
1789      t = build_real (type, result);
1790
1791      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1792      TREE_CONSTANT_OVERFLOW (t)
1793	= TREE_OVERFLOW (t)
1794	  | TREE_CONSTANT_OVERFLOW (arg1)
1795	  | TREE_CONSTANT_OVERFLOW (arg2);
1796      return t;
1797    }
1798
1799  if (TREE_CODE (arg1) == COMPLEX_CST)
1800    {
1801      tree type = TREE_TYPE (arg1);
1802      tree r1 = TREE_REALPART (arg1);
1803      tree i1 = TREE_IMAGPART (arg1);
1804      tree r2 = TREE_REALPART (arg2);
1805      tree i2 = TREE_IMAGPART (arg2);
1806      tree real, imag;
1807
1808      switch (code)
1809	{
1810	case PLUS_EXPR:
1811	case MINUS_EXPR:
1812	  real = const_binop (code, r1, r2, notrunc);
1813	  imag = const_binop (code, i1, i2, notrunc);
1814	  break;
1815
1816	case MULT_EXPR:
1817	  real = const_binop (MINUS_EXPR,
1818			      const_binop (MULT_EXPR, r1, r2, notrunc),
1819			      const_binop (MULT_EXPR, i1, i2, notrunc),
1820			      notrunc);
1821	  imag = const_binop (PLUS_EXPR,
1822			      const_binop (MULT_EXPR, r1, i2, notrunc),
1823			      const_binop (MULT_EXPR, i1, r2, notrunc),
1824			      notrunc);
1825	  break;
1826
1827	case RDIV_EXPR:
1828	  {
1829	    tree magsquared
1830	      = const_binop (PLUS_EXPR,
1831			     const_binop (MULT_EXPR, r2, r2, notrunc),
1832			     const_binop (MULT_EXPR, i2, i2, notrunc),
1833			     notrunc);
1834	    tree t1
1835	      = const_binop (PLUS_EXPR,
1836			     const_binop (MULT_EXPR, r1, r2, notrunc),
1837			     const_binop (MULT_EXPR, i1, i2, notrunc),
1838			     notrunc);
1839	    tree t2
1840	      = const_binop (MINUS_EXPR,
1841			     const_binop (MULT_EXPR, i1, r2, notrunc),
1842			     const_binop (MULT_EXPR, r1, i2, notrunc),
1843			     notrunc);
1844
1845	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1846	      code = TRUNC_DIV_EXPR;
1847
1848	    real = const_binop (code, t1, magsquared, notrunc);
1849	    imag = const_binop (code, t2, magsquared, notrunc);
1850	  }
1851	  break;
1852
1853	default:
1854	  return NULL_TREE;
1855	}
1856
1857      if (real && imag)
1858	return build_complex (type, real, imag);
1859    }
1860
1861  return NULL_TREE;
1862}
1863
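/* For example, with REAL_CST operands, a call along the lines of

	const_binop (PLUS_EXPR, build_real (double_type_node, dconst1),
		     build_real (double_type_node, dconst1), 0)

   folds to the REAL_CST 2.0 via real_arithmetic, whereas an RDIV_EXPR
   by a 0.0 constant yields NULL_TREE when flag_trapping_math is set.
   (Illustrative sketch only.)  */
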
1864/* Create a size type INT_CST node with NUMBER sign extended.  KIND
1865   indicates which particular sizetype to create.  */
1866
1867tree
1868size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1869{
1870  return build_int_cst (sizetype_tab[(int) kind], number);
1871}
1872
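/* For example, size_int (8), which is expected to be a wrapper around
   size_int_kind with the sizetype kind, yields the sizetype constant 8,
   and bitsize_int (8) the corresponding bitsizetype constant.
   (Illustrative note; the wrapper macros themselves live elsewhere.)  */
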
1873/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1874   is a tree code.  The type of the result is taken from the operands.
1875   Both must be the same integer type, and it must be a size type.
1876   If the operands are constant, so is the result.  */
1877
1878tree
1879size_binop (enum tree_code code, tree arg0, tree arg1)
1880{
1881  tree type = TREE_TYPE (arg0);
1882
1883  if (arg0 == error_mark_node || arg1 == error_mark_node)
1884    return error_mark_node;
1885
1886  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1887	      && type == TREE_TYPE (arg1));
1888
1889  /* Handle the special case of two integer constants faster.  */
1890  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1891    {
1892      /* And some specific cases even faster than that.  */
1893      if (code == PLUS_EXPR && integer_zerop (arg0))
1894	return arg1;
1895      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1896	       && integer_zerop (arg1))
1897	return arg0;
1898      else if (code == MULT_EXPR && integer_onep (arg0))
1899	return arg1;
1900
1901      /* Handle general case of two integer constants.  */
1902      return int_const_binop (code, arg0, arg1, 0);
1903    }
1904
1905  return fold_build2 (code, type, arg0, arg1);
1906}
1907
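/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   returns the sizetype constant 12 directly through int_const_binop,
   without ever building a PLUS_EXPR node.  (Illustrative only.)  */
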
1908/* Given two values, either both of sizetype or both of bitsizetype,
1909   compute the difference between the two values.  Return the value
1910   in the signed type corresponding to the type of the operands.  */
1911
1912tree
1913size_diffop (tree arg0, tree arg1)
1914{
1915  tree type = TREE_TYPE (arg0);
1916  tree ctype;
1917
1918  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1919	      && type == TREE_TYPE (arg1));
1920
1921  /* If the type is already signed, just do the simple thing.  */
1922  if (!TYPE_UNSIGNED (type))
1923    return size_binop (MINUS_EXPR, arg0, arg1);
1924
1925  ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1926
1927  /* If either operand is not a constant, do the conversions to the signed
1928     type and subtract.  The hardware will do the right thing with any
1929     overflow in the subtraction.  */
1930  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1931    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1932		       fold_convert (ctype, arg1));
1933
1934  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1935     Otherwise, subtract the other way, convert to CTYPE (we know that can't
1936     overflow) and negate (which can't either).  Special-case a result
1937     of zero while we're here.  */
1938  if (tree_int_cst_equal (arg0, arg1))
1939    return build_int_cst (ctype, 0);
1940  else if (tree_int_cst_lt (arg1, arg0))
1941    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1942  else
1943    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1944		       fold_convert (ctype, size_binop (MINUS_EXPR,
1945							arg1, arg0)));
1946}
1947
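/* For example, size_diffop (size_int (4), size_int (8)) returns the
   ssizetype constant -4: since 4 < 8 the operands are subtracted the
   other way, converted, and negated, so the subtraction itself cannot
   overflow.  (Illustrative only.)  */
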
1948/* A subroutine of fold_convert_const handling conversions of an
1949   INTEGER_CST to another integer type.  */
1950
1951static tree
1952fold_convert_const_int_from_int (tree type, tree arg1)
1953{
1954  tree t;
1955
1956  /* Given an integer constant, make new constant with new type,
1957     appropriately sign-extended or truncated.  */
1958  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1959			  TREE_INT_CST_HIGH (arg1));
1960
1961  t = force_fit_type (t,
1962		      /* Don't set the overflow when
1963		      	 converting a pointer  */
1964		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
1965		      (TREE_INT_CST_HIGH (arg1) < 0
1966		       && (TYPE_UNSIGNED (type)
1967			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1968		      | TREE_OVERFLOW (arg1),
1969		      TREE_CONSTANT_OVERFLOW (arg1));
1970
1971  return t;
1972}
1973
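/* For example, converting the integer constant 300 to an 8-bit
   unsigned type rebuilds it in the new type and lets force_fit_type
   truncate it to 44 (300 mod 256), setting overflow flags as
   appropriate.  (Illustrative only.)  */
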
1974/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1975   to an integer type.  */
1976
1977static tree
1978fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1979{
1980  int overflow = 0;
1981  tree t;
1982
1983  /* The following code implements the floating point to integer
1984     conversion rules required by the Java Language Specification,
1985     that IEEE NaNs are mapped to zero and values that overflow
1986     the target precision saturate, i.e. values greater than
1987     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1988     are mapped to INT_MIN.  These semantics are allowed by the
1989     C and C++ standards that simply state that the behavior of
1990     FP-to-integer conversion is unspecified upon overflow.  */
1991
1992  HOST_WIDE_INT high, low;
1993  REAL_VALUE_TYPE r;
1994  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1995
1996  switch (code)
1997    {
1998    case FIX_TRUNC_EXPR:
1999      real_trunc (&r, VOIDmode, &x);
2000      break;
2001
2002    case FIX_CEIL_EXPR:
2003      real_ceil (&r, VOIDmode, &x);
2004      break;
2005
2006    case FIX_FLOOR_EXPR:
2007      real_floor (&r, VOIDmode, &x);
2008      break;
2009
2010    case FIX_ROUND_EXPR:
2011      real_round (&r, VOIDmode, &x);
2012      break;
2013
2014    default:
2015      gcc_unreachable ();
2016    }
2017
2018  /* If R is NaN, return zero and show we have an overflow.  */
2019  if (REAL_VALUE_ISNAN (r))
2020    {
2021      overflow = 1;
2022      high = 0;
2023      low = 0;
2024    }
2025
2026  /* See if R is less than the lower bound or greater than the
2027     upper bound.  */
2028
2029  if (! overflow)
2030    {
2031      tree lt = TYPE_MIN_VALUE (type);
2032      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2033      if (REAL_VALUES_LESS (r, l))
2034	{
2035	  overflow = 1;
2036	  high = TREE_INT_CST_HIGH (lt);
2037	  low = TREE_INT_CST_LOW (lt);
2038	}
2039    }
2040
2041  if (! overflow)
2042    {
2043      tree ut = TYPE_MAX_VALUE (type);
2044      if (ut)
2045	{
2046	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047	  if (REAL_VALUES_LESS (u, r))
2048	    {
2049	      overflow = 1;
2050	      high = TREE_INT_CST_HIGH (ut);
2051	      low = TREE_INT_CST_LOW (ut);
2052	    }
2053	}
2054    }
2055
2056  if (! overflow)
2057    REAL_VALUE_TO_INT (&low, &high, r);
2058
2059  t = build_int_cst_wide (type, low, high);
2060
2061  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2062		      TREE_CONSTANT_OVERFLOW (arg1));
2063  return t;
2064}
2065
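/* For example, a FIX_TRUNC_EXPR converting the REAL_CST 1e30 to a
   32-bit signed type saturates to the type's maximum value with the
   overflow flags set, and a NaN converts to zero, likewise flagged,
   matching the Java-style rules described above.  (Illustrative
   only.)  */
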
2066/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067   to another floating point type.  */
2068
2069static tree
2070fold_convert_const_real_from_real (tree type, tree arg1)
2071{
2072  REAL_VALUE_TYPE value;
2073  tree t;
2074
2075  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2076  t = build_real (type, value);
2077
2078  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2079  TREE_CONSTANT_OVERFLOW (t)
2080    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2081  return t;
2082}
2083
2084/* Attempt to fold type conversion operation CODE of expression ARG1 to
2085   type TYPE.  If no simplification can be done return NULL_TREE.  */
2086
2087static tree
2088fold_convert_const (enum tree_code code, tree type, tree arg1)
2089{
2090  if (TREE_TYPE (arg1) == type)
2091    return arg1;
2092
2093  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2094    {
2095      if (TREE_CODE (arg1) == INTEGER_CST)
2096	return fold_convert_const_int_from_int (type, arg1);
2097      else if (TREE_CODE (arg1) == REAL_CST)
2098	return fold_convert_const_int_from_real (code, type, arg1);
2099    }
2100  else if (TREE_CODE (type) == REAL_TYPE)
2101    {
2102      if (TREE_CODE (arg1) == INTEGER_CST)
2103	return build_real_from_int_cst (type, arg1);
2104      if (TREE_CODE (arg1) == REAL_CST)
2105	return fold_convert_const_real_from_real (type, arg1);
2106    }
2107  return NULL_TREE;
2108}
2109
2110/* Construct a vector of zero elements of vector type TYPE.  */
2111
2112static tree
2113build_zero_vector (tree type)
2114{
2115  tree elem, list;
2116  int i, units;
2117
2118  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2119  units = TYPE_VECTOR_SUBPARTS (type);
2120
2121  list = NULL_TREE;
2122  for (i = 0; i < units; i++)
2123    list = tree_cons (NULL_TREE, elem, list);
2124  return build_vector (type, list);
2125}
2126
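/* For example, for a four-element integer vector type this produces
   the VECTOR_CST {0, 0, 0, 0}: the zero element is converted to the
   element type once and then chained four times.  (Illustrative
   only.)  */
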
2127/* Convert expression ARG to type TYPE.  Used by the middle-end for
2128   simple conversions in preference to calling the front-end's convert.  */
2129
2130tree
2131fold_convert (tree type, tree arg)
2132{
2133  tree orig = TREE_TYPE (arg);
2134  tree tem;
2135
2136  if (type == orig)
2137    return arg;
2138
2139  if (TREE_CODE (arg) == ERROR_MARK
2140      || TREE_CODE (type) == ERROR_MARK
2141      || TREE_CODE (orig) == ERROR_MARK)
2142    return error_mark_node;
2143
2144  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2145      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2146					TYPE_MAIN_VARIANT (orig)))
2147    return fold_build1 (NOP_EXPR, type, arg);
2148
2149  switch (TREE_CODE (type))
2150    {
2151    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152    case POINTER_TYPE: case REFERENCE_TYPE:
2153      /* APPLE LOCAL blocks 5862465 */
2154    case BLOCK_POINTER_TYPE:
2155    case OFFSET_TYPE:
2156      if (TREE_CODE (arg) == INTEGER_CST)
2157	{
2158	  tem = fold_convert_const (NOP_EXPR, type, arg);
2159	  if (tem != NULL_TREE)
2160	    return tem;
2161	}
2162      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2163	  || TREE_CODE (orig) == OFFSET_TYPE)
2164        return fold_build1 (NOP_EXPR, type, arg);
2165      if (TREE_CODE (orig) == COMPLEX_TYPE)
2166	{
2167	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2168	  return fold_convert (type, tem);
2169	}
2170      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2171		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2172      return fold_build1 (NOP_EXPR, type, arg);
2173
2174    case REAL_TYPE:
2175      if (TREE_CODE (arg) == INTEGER_CST)
2176	{
2177	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2178	  if (tem != NULL_TREE)
2179	    return tem;
2180	}
2181      else if (TREE_CODE (arg) == REAL_CST)
2182	{
2183	  tem = fold_convert_const (NOP_EXPR, type, arg);
2184	  if (tem != NULL_TREE)
2185	    return tem;
2186	}
2187
2188      switch (TREE_CODE (orig))
2189	{
2190	case INTEGER_TYPE:
2191	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2192	case POINTER_TYPE: case REFERENCE_TYPE:
2193	  return fold_build1 (FLOAT_EXPR, type, arg);
2194
2195	case REAL_TYPE:
2196	  return fold_build1 (NOP_EXPR, type, arg);
2197
2198	case COMPLEX_TYPE:
2199	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2200	  return fold_convert (type, tem);
2201
2202	default:
2203	  gcc_unreachable ();
2204	}
2205
2206    case COMPLEX_TYPE:
2207      switch (TREE_CODE (orig))
2208	{
2209	case INTEGER_TYPE:
2210	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2211	case POINTER_TYPE: case REFERENCE_TYPE:
2212	case REAL_TYPE:
2213	  return build2 (COMPLEX_EXPR, type,
2214			 fold_convert (TREE_TYPE (type), arg),
2215			 fold_convert (TREE_TYPE (type), integer_zero_node));
2216	case COMPLEX_TYPE:
2217	  {
2218	    tree rpart, ipart;
2219
2220	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2221	      {
2222		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2223		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2224		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2225	      }
2226
2227	    arg = save_expr (arg);
2228	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2229	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2230	    rpart = fold_convert (TREE_TYPE (type), rpart);
2231	    ipart = fold_convert (TREE_TYPE (type), ipart);
2232	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2233	  }
2234
2235	default:
2236	  gcc_unreachable ();
2237	}
2238
2239    case VECTOR_TYPE:
2240      if (integer_zerop (arg))
2241	return build_zero_vector (type);
2242      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2243      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2244		  || TREE_CODE (orig) == VECTOR_TYPE);
2245      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2246
2247    case VOID_TYPE:
2248      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2249
2250    default:
2251      gcc_unreachable ();
2252    }
2253}
2254
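/* For example, fold_convert (complex_double_type_node, integer_one_node)
   builds a COMPLEX_EXPR whose real part is the folded constant 1.0 and
   whose imaginary part is 0.0.  (Illustrative sketch only.)  */
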
2255/* Return false if expr can be assumed not to be an lvalue, true
2256   otherwise.  */
2257
2258static bool
2259maybe_lvalue_p (tree x)
2260{
2261  /* We only need to wrap lvalue tree codes.  */
2262  switch (TREE_CODE (x))
2263  {
2264  case VAR_DECL:
2265  case PARM_DECL:
2266  case RESULT_DECL:
2267  case LABEL_DECL:
2268  case FUNCTION_DECL:
2269  case SSA_NAME:
2270
2271  case COMPONENT_REF:
2272  case INDIRECT_REF:
2273  case ALIGN_INDIRECT_REF:
2274  case MISALIGNED_INDIRECT_REF:
2275  case ARRAY_REF:
2276  case ARRAY_RANGE_REF:
2277  case BIT_FIELD_REF:
2278  case OBJ_TYPE_REF:
2279
2280  case REALPART_EXPR:
2281  case IMAGPART_EXPR:
2282  case PREINCREMENT_EXPR:
2283  case PREDECREMENT_EXPR:
2284  case SAVE_EXPR:
2285  case TRY_CATCH_EXPR:
2286  case WITH_CLEANUP_EXPR:
2287  case COMPOUND_EXPR:
2288  case MODIFY_EXPR:
2289  case TARGET_EXPR:
2290  case COND_EXPR:
2291  case BIND_EXPR:
2292  case MIN_EXPR:
2293  case MAX_EXPR:
2294    break;
2295
2296  default:
2297    /* Assume the worst for front-end tree codes.  */
2298    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2299      break;
2300    return false;
2301  }
2302
2303  return true;
2304}
2305
2306/* Return an expr equal to X but certainly not valid as an lvalue.  */
2307
2308tree
2309non_lvalue (tree x)
2310{
2311  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2312     us.  */
2313  if (in_gimple_form)
2314    return x;
2315
2316  if (! maybe_lvalue_p (x))
2317    return x;
2318  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2319}
2320
2321/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2322   Zero means allow extended lvalues.  */
2323
2324int pedantic_lvalues;
2325
2326/* When pedantic, return an expr equal to X but certainly not valid as a
2327   pedantic lvalue.  Otherwise, return X.  */
2328
2329static tree
2330pedantic_non_lvalue (tree x)
2331{
2332  if (pedantic_lvalues)
2333    return non_lvalue (x);
2334  else
2335    return x;
2336}
2337
2338/* Given a tree comparison code, return the code that is the logical inverse
2339   of the given code.  It is not safe to do this for floating-point
2340   comparisons, except for NE_EXPR and EQ_EXPR, so we receive the
2341   HONOR_NANS flag as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2342
2343enum tree_code
2344invert_tree_comparison (enum tree_code code, bool honor_nans)
2345{
2346  if (honor_nans && flag_trapping_math)
2347    return ERROR_MARK;
2348
2349  switch (code)
2350    {
2351    case EQ_EXPR:
2352      return NE_EXPR;
2353    case NE_EXPR:
2354      return EQ_EXPR;
2355    case GT_EXPR:
2356      return honor_nans ? UNLE_EXPR : LE_EXPR;
2357    case GE_EXPR:
2358      return honor_nans ? UNLT_EXPR : LT_EXPR;
2359    case LT_EXPR:
2360      return honor_nans ? UNGE_EXPR : GE_EXPR;
2361    case LE_EXPR:
2362      return honor_nans ? UNGT_EXPR : GT_EXPR;
2363    case LTGT_EXPR:
2364      return UNEQ_EXPR;
2365    case UNEQ_EXPR:
2366      return LTGT_EXPR;
2367    case UNGT_EXPR:
2368      return LE_EXPR;
2369    case UNGE_EXPR:
2370      return LT_EXPR;
2371    case UNLT_EXPR:
2372      return GE_EXPR;
2373    case UNLE_EXPR:
2374      return GT_EXPR;
2375    case ORDERED_EXPR:
2376      return UNORDERED_EXPR;
2377    case UNORDERED_EXPR:
2378      return ORDERED_EXPR;
2379    default:
2380      gcc_unreachable ();
2381    }
2382}
2383
2384/* Similar, but return the comparison that results if the operands are
2385   swapped.  This is safe for floating-point.  */
2386
2387enum tree_code
2388swap_tree_comparison (enum tree_code code)
2389{
2390  switch (code)
2391    {
2392    case EQ_EXPR:
2393    case NE_EXPR:
2394    case ORDERED_EXPR:
2395    case UNORDERED_EXPR:
2396    case LTGT_EXPR:
2397    case UNEQ_EXPR:
2398      return code;
2399    case GT_EXPR:
2400      return LT_EXPR;
2401    case GE_EXPR:
2402      return LE_EXPR;
2403    case LT_EXPR:
2404      return GT_EXPR;
2405    case LE_EXPR:
2406      return GE_EXPR;
2407    case UNGT_EXPR:
2408      return UNLT_EXPR;
2409    case UNGE_EXPR:
2410      return UNLE_EXPR;
2411    case UNLT_EXPR:
2412      return UNGT_EXPR;
2413    case UNLE_EXPR:
2414      return UNGE_EXPR;
2415    default:
2416      gcc_unreachable ();
2417    }
2418}
2419
2420
2421/* Convert a comparison tree code from an enum tree_code representation
2422   into a compcode bit-based encoding.  This function is the inverse of
2423   compcode_to_comparison.  */
2424
2425static enum comparison_code
2426comparison_to_compcode (enum tree_code code)
2427{
2428  switch (code)
2429    {
2430    case LT_EXPR:
2431      return COMPCODE_LT;
2432    case EQ_EXPR:
2433      return COMPCODE_EQ;
2434    case LE_EXPR:
2435      return COMPCODE_LE;
2436    case GT_EXPR:
2437      return COMPCODE_GT;
2438    case NE_EXPR:
2439      return COMPCODE_NE;
2440    case GE_EXPR:
2441      return COMPCODE_GE;
2442    case ORDERED_EXPR:
2443      return COMPCODE_ORD;
2444    case UNORDERED_EXPR:
2445      return COMPCODE_UNORD;
2446    case UNLT_EXPR:
2447      return COMPCODE_UNLT;
2448    case UNEQ_EXPR:
2449      return COMPCODE_UNEQ;
2450    case UNLE_EXPR:
2451      return COMPCODE_UNLE;
2452    case UNGT_EXPR:
2453      return COMPCODE_UNGT;
2454    case LTGT_EXPR:
2455      return COMPCODE_LTGT;
2456    case UNGE_EXPR:
2457      return COMPCODE_UNGE;
2458    default:
2459      gcc_unreachable ();
2460    }
2461}
2462
2463/* Convert a compcode bit-based encoding of a comparison operator back
2464   to GCC's enum tree_code representation.  This function is the
2465   inverse of comparison_to_compcode.  */
2466
2467static enum tree_code
2468compcode_to_comparison (enum comparison_code code)
2469{
2470  switch (code)
2471    {
2472    case COMPCODE_LT:
2473      return LT_EXPR;
2474    case COMPCODE_EQ:
2475      return EQ_EXPR;
2476    case COMPCODE_LE:
2477      return LE_EXPR;
2478    case COMPCODE_GT:
2479      return GT_EXPR;
2480    case COMPCODE_NE:
2481      return NE_EXPR;
2482    case COMPCODE_GE:
2483      return GE_EXPR;
2484    case COMPCODE_ORD:
2485      return ORDERED_EXPR;
2486    case COMPCODE_UNORD:
2487      return UNORDERED_EXPR;
2488    case COMPCODE_UNLT:
2489      return UNLT_EXPR;
2490    case COMPCODE_UNEQ:
2491      return UNEQ_EXPR;
2492    case COMPCODE_UNLE:
2493      return UNLE_EXPR;
2494    case COMPCODE_UNGT:
2495      return UNGT_EXPR;
2496    case COMPCODE_LTGT:
2497      return LTGT_EXPR;
2498    case COMPCODE_UNGE:
2499      return UNGE_EXPR;
2500    default:
2501      gcc_unreachable ();
2502    }
2503}
2504
2505/* Return a tree for the comparison which is the combination of
2506   doing the AND or OR (depending on CODE) of the two operations LCODE
2507   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2508   the possibility of trapping if the mode has NaNs, and return NULL_TREE
2509   if this makes the transformation invalid.  */
2510
2511tree
2512combine_comparisons (enum tree_code code, enum tree_code lcode,
2513		     enum tree_code rcode, tree truth_type,
2514		     tree ll_arg, tree lr_arg)
2515{
2516  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2517  enum comparison_code lcompcode = comparison_to_compcode (lcode);
2518  enum comparison_code rcompcode = comparison_to_compcode (rcode);
2519  enum comparison_code compcode;
2520
2521  switch (code)
2522    {
2523    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2524      compcode = lcompcode & rcompcode;
2525      break;
2526
2527    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2528      compcode = lcompcode | rcompcode;
2529      break;
2530
2531    default:
2532      return NULL_TREE;
2533    }
2534
2535  if (!honor_nans)
2536    {
2537      /* Eliminate unordered comparisons, as well as LTGT and ORD
2538	 which are not used unless the mode has NaNs.  */
2539      compcode &= ~COMPCODE_UNORD;
2540      if (compcode == COMPCODE_LTGT)
2541	compcode = COMPCODE_NE;
2542      else if (compcode == COMPCODE_ORD)
2543	compcode = COMPCODE_TRUE;
2544    }
2545   else if (flag_trapping_math)
2546     {
2547	/* Check that the original operation and the optimized ones will trap
2548	   under the same condition.  */
2549	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2550		     && (lcompcode != COMPCODE_EQ)
2551		     && (lcompcode != COMPCODE_ORD);
2552	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2553		     && (rcompcode != COMPCODE_EQ)
2554		     && (rcompcode != COMPCODE_ORD);
2555	bool trap = (compcode & COMPCODE_UNORD) == 0
2556		    && (compcode != COMPCODE_EQ)
2557		    && (compcode != COMPCODE_ORD);
2558
2559        /* In a short-circuited boolean expression the LHS might be
2560	   such that the RHS, if evaluated, will never trap.  For
2561	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2562	   if neither x nor y is NaN.  (This is a mixed blessing: for
2563	   example, the expression above will never trap, hence
2564	   optimizing it to x < y would be invalid).  */
2565        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2566            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2567          rtrap = false;
2568
2569        /* If the comparison was short-circuited, and only the RHS
2570	   trapped, we may now generate a spurious trap.  */
2571	if (rtrap && !ltrap
2572	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2573	  return NULL_TREE;
2574
2575	/* If we changed the conditions that cause a trap, we lose.  */
2576	if ((ltrap || rtrap) != trap)
2577	  return NULL_TREE;
2578      }
2579
2580  if (compcode == COMPCODE_TRUE)
2581    return constant_boolean_node (true, truth_type);
2582  else if (compcode == COMPCODE_FALSE)
2583    return constant_boolean_node (false, truth_type);
2584  else
2585    return fold_build2 (compcode_to_comparison (compcode),
2586			truth_type, ll_arg, lr_arg);
2587}
2588
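/* For example, combining (x < y) with (x == y) under TRUTH_OR_EXPR ORs
   the compcodes of LT_EXPR and EQ_EXPR, producing the compcode of
   LE_EXPR, so the pair folds to x <= y.  (Illustrative only.)  */
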
2589/* Return nonzero if CODE is a tree code that represents a truth value.  */
2590
2591static int
2592truth_value_p (enum tree_code code)
2593{
2594  return (TREE_CODE_CLASS (code) == tcc_comparison
2595	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2596	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2597	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2598}
2599
2600/* Return nonzero if two operands (typically of the same tree node)
2601   are necessarily equal.  If either argument has side-effects this
2602   function returns zero.  FLAGS modifies behavior as follows:
2603
2604   If OEP_ONLY_CONST is set, only return nonzero for constants.
2605   This function tests whether the operands are indistinguishable;
2606   it does not test whether they are equal using C's == operation.
2607   The distinction is important for IEEE floating point, because
2608   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2609   (2) two NaNs may be indistinguishable, but NaN!=NaN.
2610
2611   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2612   even though it may hold multiple values during a function.
2613   This is because a GCC tree node guarantees that nothing else is
2614   executed between the evaluation of its "operands" (which may often
2615   be evaluated in arbitrary order).  Hence if the operands themselves
2616   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2617   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2618   unset means assuming isochronic (or instantaneous) tree equivalence.
2619   Unless comparing arbitrary expression trees, such as from different
2620   statements, this flag can usually be left unset.
2621
2622   If OEP_PURE_SAME is set, then pure functions with identical arguments
2623   are considered the same.  It is used when the caller has other ways
2624   to ensure that global memory is unchanged in between.  */
2625
2626int
2627operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2628{
2629  /* If either is ERROR_MARK, they aren't equal.  */
2630  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2631    return 0;
2632
2633  /* If the two types don't have the same signedness, then we can't consider
2634     them equal.  We must check this before the STRIP_NOPS calls
2635     because they may change the signedness of the arguments.  */
2636  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2637    return 0;
2638
2639  /* If the two types don't have the same precision, then it is not safe
2640     to strip NOPs.  */
2641  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2642    return 0;
2643
2644  STRIP_NOPS (arg0);
2645  STRIP_NOPS (arg1);
2646
2647  /* In case both args are comparisons but with different comparison
2648     code, try to swap the comparison operands of one arg to produce
2649     a match and compare that variant.  */
2650  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2651      && COMPARISON_CLASS_P (arg0)
2652      && COMPARISON_CLASS_P (arg1))
2653    {
2654      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2655
2656      if (TREE_CODE (arg0) == swap_code)
2657	return operand_equal_p (TREE_OPERAND (arg0, 0),
2658			        TREE_OPERAND (arg1, 1), flags)
2659	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2660				   TREE_OPERAND (arg1, 0), flags);
2661    }
2662
2663  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2664      /* This is needed for conversions and for COMPONENT_REF.
2665	 Might as well play it safe and always test this.  */
2666      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2667      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2668      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2669    return 0;
2670
2671  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2672     We don't care about side effects in that case because the SAVE_EXPR
2673     takes care of that for us. In all other cases, two expressions are
2674     equal if they have no side effects.  If we have two identical
2675     expressions with side effects that should be treated the same due
2676     to the only side effects being identical SAVE_EXPR's, that will
2677     be detected in the recursive calls below.  */
2678  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2679      && (TREE_CODE (arg0) == SAVE_EXPR
2680	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2681    return 1;
2682
2683  /* Next handle constant cases, those for which we can return 1 even
2684     if ONLY_CONST is set.  */
2685  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2686    switch (TREE_CODE (arg0))
2687      {
2688      case INTEGER_CST:
2689	return (! TREE_CONSTANT_OVERFLOW (arg0)
2690		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2691		&& tree_int_cst_equal (arg0, arg1));
2692
2693      case REAL_CST:
2694	return (! TREE_CONSTANT_OVERFLOW (arg0)
2695		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2696		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2697					  TREE_REAL_CST (arg1)));
2698
2699      case VECTOR_CST:
2700	{
2701	  tree v1, v2;
2702
2703	  if (TREE_CONSTANT_OVERFLOW (arg0)
2704	      || TREE_CONSTANT_OVERFLOW (arg1))
2705	    return 0;
2706
2707	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2708	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2709	  while (v1 && v2)
2710	    {
2711	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2712				    flags))
2713		return 0;
2714	      v1 = TREE_CHAIN (v1);
2715	      v2 = TREE_CHAIN (v2);
2716	    }
2717
2718	  return v1 == v2;
2719	}
2720
2721      case COMPLEX_CST:
2722	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2723				 flags)
2724		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2725				    flags));
2726
2727      case STRING_CST:
2728	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2729		&& ! memcmp (TREE_STRING_POINTER (arg0),
2730			      TREE_STRING_POINTER (arg1),
2731			      TREE_STRING_LENGTH (arg0)));
2732
2733      case ADDR_EXPR:
2734	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2735				0);
2736      default:
2737	break;
2738      }
2739
2740  if (flags & OEP_ONLY_CONST)
2741    return 0;
2742
2743/* Define macros to test an operand from arg0 and arg1 for equality and a
2744   variant that allows null and views null as being different from any
2745   non-null value.  In the latter case, if either is null, then both
2746   must be; otherwise, do the normal comparison.  */
2747#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2748				    TREE_OPERAND (arg1, N), flags)
2749
2750#define OP_SAME_WITH_NULL(N)				\
2751  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2752   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2753
2754  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2755    {
2756    case tcc_unary:
2757      /* Two conversions are equal only if signedness and modes match.  */
2758      switch (TREE_CODE (arg0))
2759        {
2760        case NOP_EXPR:
2761        case CONVERT_EXPR:
2762        case FIX_CEIL_EXPR:
2763        case FIX_TRUNC_EXPR:
2764        case FIX_FLOOR_EXPR:
2765        case FIX_ROUND_EXPR:
2766	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2767	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2768	    return 0;
2769	  break;
2770	default:
2771	  break;
2772	}
2773
2774      return OP_SAME (0);
2775
2776
2777    case tcc_comparison:
2778    case tcc_binary:
2779      if (OP_SAME (0) && OP_SAME (1))
2780	return 1;
2781
2782      /* For commutative ops, allow the other order.  */
2783      return (commutative_tree_code (TREE_CODE (arg0))
2784	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2785				  TREE_OPERAND (arg1, 1), flags)
2786	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2787				  TREE_OPERAND (arg1, 0), flags));
2788
2789    case tcc_reference:
2790      /* If either of the pointer (or reference) expressions we are
2791	 dereferencing contain a side effect, these cannot be equal.  */
2792      if (TREE_SIDE_EFFECTS (arg0)
2793	  || TREE_SIDE_EFFECTS (arg1))
2794	return 0;
2795
2796      switch (TREE_CODE (arg0))
2797	{
2798	case INDIRECT_REF:
2799	case ALIGN_INDIRECT_REF:
2800	case MISALIGNED_INDIRECT_REF:
2801	case REALPART_EXPR:
2802	case IMAGPART_EXPR:
2803	  return OP_SAME (0);
2804
2805	case ARRAY_REF:
2806	case ARRAY_RANGE_REF:
2807	  /* Operands 2 and 3 may be null.
2808	     Compare the array index by value first if it is constant, as we
2809	     may have different types but the same value here.  */
2810	  return (OP_SAME (0)
2811		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2812					  TREE_OPERAND (arg1, 1))
2813		      || OP_SAME (1))
2814		  && OP_SAME_WITH_NULL (2)
2815		  && OP_SAME_WITH_NULL (3));
2816
2817	case COMPONENT_REF:
2818	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2819	     may be NULL when we're called to compare MEM_EXPRs.  */
2820	  return OP_SAME_WITH_NULL (0)
2821		 && OP_SAME (1)
2822		 && OP_SAME_WITH_NULL (2);
2823
2824	case BIT_FIELD_REF:
2825	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2826
2827	default:
2828	  return 0;
2829	}
2830
2831    case tcc_expression:
2832      switch (TREE_CODE (arg0))
2833	{
2834	case ADDR_EXPR:
2835	case TRUTH_NOT_EXPR:
2836	  return OP_SAME (0);
2837
2838	case TRUTH_ANDIF_EXPR:
2839	case TRUTH_ORIF_EXPR:
2840	  return OP_SAME (0) && OP_SAME (1);
2841
2842	case TRUTH_AND_EXPR:
2843	case TRUTH_OR_EXPR:
2844	case TRUTH_XOR_EXPR:
2845	  if (OP_SAME (0) && OP_SAME (1))
2846	    return 1;
2847
2848	  /* Otherwise take into account this is a commutative operation.  */
2849	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2850				   TREE_OPERAND (arg1, 1), flags)
2851		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2852				      TREE_OPERAND (arg1, 0), flags));
2853
2854	case CALL_EXPR:
2855	  /* If the CALL_EXPRs call different functions, then they
2856	     clearly cannot be equal.  */
2857	  if (!OP_SAME (0))
2858	    return 0;
2859
2860	  {
2861	    unsigned int cef = call_expr_flags (arg0);
2862	    if (flags & OEP_PURE_SAME)
2863	      cef &= ECF_CONST | ECF_PURE;
2864	    else
2865	      cef &= ECF_CONST;
2866	    if (!cef)
2867	      return 0;
2868	  }
2869
2870	  /* Now see if all the arguments are the same.  operand_equal_p
2871	     does not handle TREE_LIST, so we walk the operands here
2872	     feeding them to operand_equal_p.  */
2873	  arg0 = TREE_OPERAND (arg0, 1);
2874	  arg1 = TREE_OPERAND (arg1, 1);
2875	  while (arg0 && arg1)
2876	    {
2877	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2878				     flags))
2879		return 0;
2880
2881	      arg0 = TREE_CHAIN (arg0);
2882	      arg1 = TREE_CHAIN (arg1);
2883	    }
2884
2885	  /* If we get here and both argument lists are exhausted
2886	     then the CALL_EXPRs are equal.  */
2887	  return ! (arg0 || arg1);
2888
2889	default:
2890	  return 0;
2891	}
2892
2893    case tcc_declaration:
2894      /* Consider __builtin_sqrt equal to sqrt.  */
2895      return (TREE_CODE (arg0) == FUNCTION_DECL
2896	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2897	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2898	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2899
2900    default:
2901      return 0;
2902    }
2903
2904#undef OP_SAME
2905#undef OP_SAME_WITH_NULL
2906}
2907
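/* For example, operand_equal_p treats a + b and b + a as equal because
   PLUS_EXPR is commutative, but two identical calls f () and f () are
   only considered equal when f is ECF_CONST (or ECF_PURE with
   OEP_PURE_SAME), since otherwise the calls may observe different
   global state.  (Illustrative only.)  */
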
2908/* Similar to operand_equal_p, but see if ARG0 might have been made by
2909   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2910
2911   When in doubt, return 0.  */
2912
2913static int
2914operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2915{
2916  int unsignedp1, unsignedpo;
2917  tree primarg0, primarg1, primother;
2918  unsigned int correct_width;
2919
2920  if (operand_equal_p (arg0, arg1, 0))
2921    return 1;
2922
2923  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2924      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2925    return 0;
2926
2927  /* Discard any conversions that don't change the modes of ARG0 and ARG1
2928     and see if the inner values are the same.  This removes any
2929     signedness comparison, which doesn't matter here.  */
2930  primarg0 = arg0, primarg1 = arg1;
2931  STRIP_NOPS (primarg0);
2932  STRIP_NOPS (primarg1);
2933  if (operand_equal_p (primarg0, primarg1, 0))
2934    return 1;
2935
2936  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2937     actual comparison operand, ARG0.
2938
2939     First throw away any conversions to wider types
2940     already present in the operands.  */
2941
2942  primarg1 = get_narrower (arg1, &unsignedp1);
2943  primother = get_narrower (other, &unsignedpo);
2944
2945  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2946  if (unsignedp1 == unsignedpo
2947      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2948      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2949    {
2950      tree type = TREE_TYPE (arg0);
2951
2952      /* Make sure shorter operand is extended the right way
2953	 to match the longer operand.  */
2954      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2955			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2956
2957      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2958	return 1;
2959    }
2960
2961  return 0;
2962}
2963
2964/* See if ARG is an expression that is either a comparison or is performing
2965   arithmetic on comparisons.  The comparisons must only be comparing
2966   two different values, which will be stored in *CVAL1 and *CVAL2; if
2967   they are nonzero it means that some operands have already been found.
2968   No variables may be used anywhere else in the expression except in the
2969   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2970   the expression and save_expr needs to be called with CVAL1 and CVAL2.
2971
2972   If this is true, return 1.  Otherwise, return zero.  */
2973
2974static int
2975twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2976{
2977  enum tree_code code = TREE_CODE (arg);
2978  enum tree_code_class class = TREE_CODE_CLASS (code);
2979
2980  /* We can handle some of the tcc_expression cases here.  */
2981  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2982    class = tcc_unary;
2983  else if (class == tcc_expression
2984	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2985	       || code == COMPOUND_EXPR))
2986    class = tcc_binary;
2987
2988  else if (class == tcc_expression && code == SAVE_EXPR
2989	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2990    {
2991      /* If we've already found a CVAL1 or CVAL2, this expression is
2992	 too complex to handle.  */
2993      if (*cval1 || *cval2)
2994	return 0;
2995
2996      class = tcc_unary;
2997      *save_p = 1;
2998    }
2999
3000  switch (class)
3001    {
3002    case tcc_unary:
3003      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3004
3005    case tcc_binary:
3006      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3007	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
3008				      cval1, cval2, save_p));
3009
3010    case tcc_constant:
3011      return 1;
3012
3013    case tcc_expression:
3014      if (code == COND_EXPR)
3015	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3016				     cval1, cval2, save_p)
3017		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
3018					cval1, cval2, save_p)
3019		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
3020					cval1, cval2, save_p));
3021      return 0;
3022
3023    case tcc_comparison:
3024      /* First see if we can handle the first operand, then the second.  For
3025	 the second operand, we know *CVAL1 can't be zero.  It must be that
3026	 one side of the comparison is each of the values; test for the
3027	 case where this isn't true by failing if the two operands
3028	 are the same.  */
3029
3030      if (operand_equal_p (TREE_OPERAND (arg, 0),
3031			   TREE_OPERAND (arg, 1), 0))
3032	return 0;
3033
3034      if (*cval1 == 0)
3035	*cval1 = TREE_OPERAND (arg, 0);
3036      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3037	;
3038      else if (*cval2 == 0)
3039	*cval2 = TREE_OPERAND (arg, 0);
3040      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3041	;
3042      else
3043	return 0;
3044
3045      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3046	;
3047      else if (*cval2 == 0)
3048	*cval2 = TREE_OPERAND (arg, 1);
3049      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3050	;
3051      else
3052	return 0;
3053
3054      return 1;
3055
3056    default:
3057      return 0;
3058    }
3059}
3060
3061/* ARG is a tree that is known to contain just arithmetic operations and
3062   comparisons.  Evaluate the operations in the tree substituting NEW0 for
3063   any occurrence of OLD0 as an operand of a comparison and likewise for
3064   NEW1 and OLD1.  */
3065
3066static tree
3067eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3068{
3069  tree type = TREE_TYPE (arg);
3070  enum tree_code code = TREE_CODE (arg);
3071  enum tree_code_class class = TREE_CODE_CLASS (code);
3072
3073  /* We can handle some of the tcc_expression cases here.  */
3074  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3075    class = tcc_unary;
3076  else if (class == tcc_expression
3077	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3078    class = tcc_binary;
3079
3080  switch (class)
3081    {
3082    case tcc_unary:
3083      return fold_build1 (code, type,
3084			  eval_subst (TREE_OPERAND (arg, 0),
3085				      old0, new0, old1, new1));
3086
3087    case tcc_binary:
3088      return fold_build2 (code, type,
3089			  eval_subst (TREE_OPERAND (arg, 0),
3090				      old0, new0, old1, new1),
3091			  eval_subst (TREE_OPERAND (arg, 1),
3092				      old0, new0, old1, new1));
3093
3094    case tcc_expression:
3095      switch (code)
3096	{
3097	case SAVE_EXPR:
3098	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3099
3100	case COMPOUND_EXPR:
3101	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3102
3103	case COND_EXPR:
3104	  return fold_build3 (code, type,
3105			      eval_subst (TREE_OPERAND (arg, 0),
3106					  old0, new0, old1, new1),
3107			      eval_subst (TREE_OPERAND (arg, 1),
3108					  old0, new0, old1, new1),
3109			      eval_subst (TREE_OPERAND (arg, 2),
3110					  old0, new0, old1, new1));
3111	default:
3112	  break;
3113	}
3114      /* Fall through - ???  */
3115
3116    case tcc_comparison:
3117      {
3118	tree arg0 = TREE_OPERAND (arg, 0);
3119	tree arg1 = TREE_OPERAND (arg, 1);
3120
3121	/* We need to check for both exact equality and tree equality.  The
3122	   former will be true if the operand has a side-effect.  In that
3123	   case, we know the operand occurred exactly once.  */
3124
3125	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3126	  arg0 = new0;
3127	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3128	  arg0 = new1;
3129
3130	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3131	  arg1 = new0;
3132	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3133	  arg1 = new1;
3134
3135	return fold_build2 (code, type, arg0, arg1);
3136      }
3137
3138    default:
3139      return arg;
3140    }
3141}
3142
3143/* Return a tree for the case when the result of an expression is RESULT
3144   converted to TYPE and OMITTED was previously an operand of the expression
3145   but is now not needed (e.g., we folded OMITTED * 0).
3146
3147   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3148   the conversion of RESULT to TYPE.  */
3149
3150tree
3151omit_one_operand (tree type, tree result, tree omitted)
3152{
3153  tree t = fold_convert (type, result);
3154
3155  if (TREE_SIDE_EFFECTS (omitted))
3156    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3157
3158  return non_lvalue (t);
3159}
3160
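/* For example, when folding f () * 0, a caller can use
   omit_one_operand (type, integer_zero_node, call) to preserve the
   call's side effects; the result is a COMPOUND_EXPR that evaluates
   f () and then yields 0.  (Illustrative only.)  */
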
3161/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3162
3163static tree
3164pedantic_omit_one_operand (tree type, tree result, tree omitted)
3165{
3166  tree t = fold_convert (type, result);
3167
3168  if (TREE_SIDE_EFFECTS (omitted))
3169    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3170
3171  return pedantic_non_lvalue (t);
3172}
3173
3174/* Return a tree for the case when the result of an expression is RESULT
3175   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3176   of the expression but are now not needed.
3177
3178   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3179   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3180   evaluated before OMITTED2.  Otherwise, if neither has side effects,
3181   just do the conversion of RESULT to TYPE.  */
3182
3183tree
3184omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3185{
3186  tree t = fold_convert (type, result);
3187
3188  if (TREE_SIDE_EFFECTS (omitted2))
3189    t = build2 (COMPOUND_EXPR, type, omitted2, t);
3190  if (TREE_SIDE_EFFECTS (omitted1))
3191    t = build2 (COMPOUND_EXPR, type, omitted1, t);
3192
3193  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3194}
3195
3196
3197/* Return a simplified tree node for the truth-negation of ARG.  This
3198   never alters ARG itself.  We assume that ARG is an operation that
3199   returns a truth value (0 or 1).
3200
3201   FIXME: one would think we would fold the result, but it causes
3202   problems with the dominator optimizer.  */
3203
3204tree
3205fold_truth_not_expr (tree arg)
3206{
3207  tree type = TREE_TYPE (arg);
3208  enum tree_code code = TREE_CODE (arg);
3209
3210  /* If this is a comparison, we can simply invert it, except for
3211     floating-point non-equality comparisons, in which case we just
3212     return NULL_TREE and let the caller wrap a TRUTH_NOT_EXPR around it.  */
3213
3214  if (TREE_CODE_CLASS (code) == tcc_comparison)
3215    {
3216      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3217      if (FLOAT_TYPE_P (op_type)
3218	  && flag_trapping_math
3219	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3220	  && code != NE_EXPR && code != EQ_EXPR)
3221	return NULL_TREE;
3222      else
3223	{
3224	  code = invert_tree_comparison (code,
3225					 HONOR_NANS (TYPE_MODE (op_type)));
3226	  if (code == ERROR_MARK)
3227	    return NULL_TREE;
3228	  else
3229	    return build2 (code, type,
3230			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3231	}
3232    }
3233
3234  switch (code)
3235    {
3236    case INTEGER_CST:
3237      return constant_boolean_node (integer_zerop (arg), type);
3238
3239    case TRUTH_AND_EXPR:
3240      return build2 (TRUTH_OR_EXPR, type,
3241		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3242		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3243
3244    case TRUTH_OR_EXPR:
3245      return build2 (TRUTH_AND_EXPR, type,
3246		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3247		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3248
3249    case TRUTH_XOR_EXPR:
3250      /* Here we can invert either operand.  We invert the first operand
3251	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3252	 result is the XOR of the first operand with the inside of the
3253	 negation of the second operand.  */
3254
3255      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3256	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3257		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3258      else
3259	return build2 (TRUTH_XOR_EXPR, type,
3260		       invert_truthvalue (TREE_OPERAND (arg, 0)),
3261		       TREE_OPERAND (arg, 1));
3262
3263    case TRUTH_ANDIF_EXPR:
3264      return build2 (TRUTH_ORIF_EXPR, type,
3265		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3266		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3267
3268    case TRUTH_ORIF_EXPR:
3269      return build2 (TRUTH_ANDIF_EXPR, type,
3270		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3271		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3272
3273    case TRUTH_NOT_EXPR:
3274      return TREE_OPERAND (arg, 0);
3275
3276    case COND_EXPR:
3277      {
3278	tree arg1 = TREE_OPERAND (arg, 1);
3279	tree arg2 = TREE_OPERAND (arg, 2);
3280	/* A COND_EXPR may have a throw as one operand, which
3281	   then has void type.  Just leave void operands
3282	   as they are.  */
3283	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3284		       VOID_TYPE_P (TREE_TYPE (arg1))
3285		       ? arg1 : invert_truthvalue (arg1),
3286		       VOID_TYPE_P (TREE_TYPE (arg2))
3287		       ? arg2 : invert_truthvalue (arg2));
3288      }
3289
3290    case COMPOUND_EXPR:
3291      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3292		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3293
3294    case NON_LVALUE_EXPR:
3295      return invert_truthvalue (TREE_OPERAND (arg, 0));
3296
3297    case NOP_EXPR:
3298      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3299	return build1 (TRUTH_NOT_EXPR, type, arg);
3300
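      /* ... fall through ...  */
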
3301    case CONVERT_EXPR:
3302    case FLOAT_EXPR:
3303      return build1 (TREE_CODE (arg), type,
3304		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3305
3306    case BIT_AND_EXPR:
3307      if (!integer_onep (TREE_OPERAND (arg, 1)))
3308	break;
3309      return build2 (EQ_EXPR, type, arg,
3310		     build_int_cst (type, 0));
3311
3312    case SAVE_EXPR:
3313      return build1 (TRUTH_NOT_EXPR, type, arg);
3314
3315    case CLEANUP_POINT_EXPR:
3316      return build1 (CLEANUP_POINT_EXPR, type,
3317		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3318
3319    default:
3320      break;
3321    }
3322
3323  return NULL_TREE;
3324}
3325
3326/* Return a simplified tree node for the truth-negation of ARG.  This
3327   never alters ARG itself.  We assume that ARG is an operation that
3328   returns a truth value (0 or 1).
3329
3330   FIXME: one would think we would fold the result, but it causes
3331   problems with the dominator optimizer.  */
3332
3333tree
3334invert_truthvalue (tree arg)
3335{
3336  tree tem;
3337
3338  if (TREE_CODE (arg) == ERROR_MARK)
3339    return arg;
3340
3341  tem = fold_truth_not_expr (arg);
3342  if (!tem)
3343    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3344
3345  return tem;
3346}
3347
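/* For example, invert_truthvalue turns a < b into a >= b for integer
   operands; for a trapping floating-point comparison, where inverting
   is unsafe, it falls back to wrapping the argument in a
   TRUTH_NOT_EXPR.  (Illustrative only.)  */
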
3348/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3349   operands are another bit-wise operation with a common input.  If so,
3350   distribute the bit operations to save an operation and possibly two if
3351   constants are involved.  For example, convert
3352	(A | B) & (A | C) into A | (B & C)
3353   Further simplification will occur if B and C are constants.
3354
3355   If this optimization cannot be done, 0 will be returned.  */
3356
3357static tree
3358distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3359{
3360  tree common;
3361  tree left, right;
3362
3363  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3364      || TREE_CODE (arg0) == code
3365      || (TREE_CODE (arg0) != BIT_AND_EXPR
3366	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3367    return 0;
3368
3369  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3370    {
3371      common = TREE_OPERAND (arg0, 0);
3372      left = TREE_OPERAND (arg0, 1);
3373      right = TREE_OPERAND (arg1, 1);
3374    }
3375  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3376    {
3377      common = TREE_OPERAND (arg0, 0);
3378      left = TREE_OPERAND (arg0, 1);
3379      right = TREE_OPERAND (arg1, 0);
3380    }
3381  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3382    {
3383      common = TREE_OPERAND (arg0, 1);
3384      left = TREE_OPERAND (arg0, 0);
3385      right = TREE_OPERAND (arg1, 1);
3386    }
3387  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3388    {
3389      common = TREE_OPERAND (arg0, 1);
3390      left = TREE_OPERAND (arg0, 0);
3391      right = TREE_OPERAND (arg1, 0);
3392    }
3393  else
3394    return 0;
3395
3396  return fold_build2 (TREE_CODE (arg0), type, common,
3397		      fold_build2 (code, type, left, right));
3398}
3399
3400/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3401   with code CODE.  This optimization is unsafe.  */
3402static tree
3403distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3404{
3405  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3406  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3407
3408  /* (A / C) +- (B / C) -> (A +- B) / C.  */
3409  if (mul0 == mul1
3410      && operand_equal_p (TREE_OPERAND (arg0, 1),
3411		       TREE_OPERAND (arg1, 1), 0))
3412    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3413			fold_build2 (code, type,
3414				     TREE_OPERAND (arg0, 0),
3415				     TREE_OPERAND (arg1, 0)),
3416			TREE_OPERAND (arg0, 1));
3417
3418  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3419  if (operand_equal_p (TREE_OPERAND (arg0, 0),
3420		       TREE_OPERAND (arg1, 0), 0)
3421      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3422      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3423    {
3424      REAL_VALUE_TYPE r0, r1;
3425      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3426      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3427      if (!mul0)
3428	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3429      if (!mul1)
3430        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3431      real_arithmetic (&r0, code, &r0, &r1);
3432      return fold_build2 (MULT_EXPR, type,
3433			  TREE_OPERAND (arg0, 0),
3434			  build_real (type, r0));
3435    }
3436
3437  return NULL_TREE;
3438}
3439
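/* For example, x / 2.0 + x / 4.0 matches the second pattern above and
   is rewritten as x * 0.75, folding the reciprocals and their sum at
   compile time; this can change rounding, which is why the function is
   documented as unsafe.  (Illustrative only.)  */
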
3440/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3441   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3442
3443static tree
3444make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3445		    int unsignedp)
3446{
3447  tree result;
3448
3449  if (bitpos == 0)
3450    {
3451      tree size = TYPE_SIZE (TREE_TYPE (inner));
3452      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3453	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3454	  && host_integerp (size, 0)
3455	  && tree_low_cst (size, 0) == bitsize)
3456	return fold_convert (type, inner);
3457    }
3458
3459  result = build3 (BIT_FIELD_REF, type, inner,
3460		   size_int (bitsize), bitsize_int (bitpos));
3461
3462  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3463
3464  return result;
3465}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (linner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask),
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (rinner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
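
/* Sketch of the constant case on hypothetical user code (the struct and
   bit layout are illustrative only):

     struct s { unsigned a : 3; unsigned b : 5; } x;
     ...
     if (x.b == 9) ...

   With both fields in one byte and B at bit position 3, the comparison
   becomes, in effect,

     if ((load8 (&x) & 0xF8) == (9 << 3))

   i.e. one load, one AND and one compare, with no extraction shift.  */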

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
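
/* The shift pair above is the usual idiom for building *PBITSIZE
   low-order ones without risking a shift by the full word width.  For
   example, with *PBITSIZE = 3 and an 8-bit UNSIGNED_TYPE:

	mask = 0xFF;
	mask <<= 8 - 3;		/* mask == 0xE0 */
	mask >>= 8 - 3;		/* mask == 0x07, logical shift */

   which is exactly the mask covering a 3-bit field.  */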

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
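
/* Worked instance of the two-word test above, assuming
   HOST_BITS_PER_WIDE_INT == 32.  For a 32-bit type (width == 32):

	hi = 0,       lo      = (unsigned) 1 << 31 = 0x80000000
	mask_hi = 0,  mask_lo = 0xFFFFFFFF

   so VAL matches iff its low word is 0x80000000, the INT_MIN bit
   pattern.  For a 64-bit type the sign bit lives in the high word:
   hi = 0x80000000, lo = 0, and both mask words are all ones.  */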

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
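
/* Worked check of the example above, in 32-bit unsigned arithmetic:

	X = 1:	(unsigned) (1 - 2) = 0xFFFFFFFF	 > 3	rejected
	X = 2:	(unsigned) (2 - 2) = 0		<= 3	accepted
	X = 5:	(unsigned) (5 - 2) = 3		<= 3	accepted
	X = 6:	(unsigned) (6 - 2) = 4		 > 3	rejected

   Values below the lower bound wrap around to huge unsigned values, so a
   single compare tests both bounds of [2, 5] at once.  */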

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real mathematics, we cannot assume open-ended ranges
     are the same.  But, this is computer arithmetic, where numbers are
     finite.  We can therefore represent any unbounded end by a value Z
     greater than any representable number.  This permits us to treat
     unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_CODE_LENGTH (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P, since IN_P so far reflects being not
	     equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type to exp's
	     signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type = lang_hooks.types.type_for_mode
		(TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		: TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     fold_convert (arg0_type,
							   integer_one_node));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
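
/* Worked example of the wrap-around normalization in the PLUS_EXPR case
   above, for unsigned char X.  Starting from the range + [5, 250] for
   X + 10 and subtracting 10 from both bounds modulo 256:

	n_low  = 5 - 10	  = 251
	n_high = 250 - 10 = 240

   Since n_high < n_low the range has wrapped, so it is rewritten as the
   complement - [241, 250]; that is indeed the set of X for which X + 10
   stays inside [5, 250] under modulo-256 arithmetic.  */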

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = lang_hooks.types.unsigned_type (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = lang_hooks.types.signed_type (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetic up front, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = lang_hooks.types.unsigned_type (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
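
/* Note on the (c >= 1 && c <= 127) transformation above: when ETYPE has
   8 bits and the range is [1, 127], the members are exactly the nonzero
   values with the sign bit clear.  Reinterpreted as signed char, the
   values 128..255 become negative and 0 stays 0, so

	(signed char) c > 0

   is equivalent to both original compares; the code recognizes the upper
   bound 2**(prec-1) - 1 via the hi/lo pair computed above.  */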

/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if that isn't the case.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
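
/* Worked example of the in0_p && in1_p case: merging + [2, 10] with
   + [5, 20].  Range 0 starts first, the two overlap, and the second is
   not a subset, so the result is + [low1, high0] = + [5, 10]; indeed
   "x in [2, 10] && x in [5, 20]" is exactly "x in [5, 10]".  */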

/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X; check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
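
/* The first transformation table above, instantiated for a double A
   (hypothetical source code):

	A >= 0 ? A : -A	  becomes  fabs (A)
	A <= 0 ? A : -A	  becomes  -fabs (A)
	A == 0 ? A : -A	  becomes  -A

   The signed-zero caveat shows in the last line: for A = +0.0 the
   original selects the first arm and yields +0.0, while the rewritten
   -A yields -0.0, so the rewrite is only done when the mode does not
   honor signed zeros.  */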

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue (tem) : tem;
    }

  /* On machines where branches are expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2 (code == TRUTH_ANDIF_EXPR
			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			     type, lhs, rhs);
	    }
	}
    }

  return 0;
}
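
/* For the OR case this works by De Morgan on ranges.  To fold
   "X < 2 || X > 5", both sides are inverted, giving the ranges + [2, -]
   and + [-, 5]; these merge to + [2, 5], and the built range check is
   inverted once more, yielding a test equivalent to
   (unsigned) (X - 2) > 3.  */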

/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
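
/* Worked example, with P = 4, an 8-bit mode and no MASK.  For C = 0xFA,
   i.e. the 4-bit field value 1010 already sign-extended to 8 bits:

	temp = (c >> 3) & 1	 = 1		the field's sign bit
	temp = (temp << 7) >> 3	 = 0xF0		arithmetic right shift
	c ^ temp		 = 0xFA ^ 0xF0	= 0x0A

   so the extra bits come out zero exactly because C was sign-extended;
   for C = 0x0A, which is not sign-extended, the same steps yield 0xFA
   with the extra bits set.  */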

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.
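
   Concretely (an illustrative sketch; the exact constant depends on the
   target's layout and endianness): with "struct s { unsigned char a, b; } *p",
   the test "p->a == 2 && p->b == 4" can become one 16-bit load of *p
   compared against 0x0402 on a little-endian machine.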

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */
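  /* E.g. (illustrative): for a one-bit field b, "b != 0" tests the same
     thing as "b == 1", so it can still be merged with an EQ_EXPR partner.  */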

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
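  /* E.g. (illustrative): fields at bit positions 3..5 and 9..12 both fit
     in one 16-bit access; lnbitpos is then rounded down to 0 below, and
     the field positions are re-expressed relative to that wider word.  */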
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
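      /* E.g. (illustrative): comparing two adjacent 4-bit fields of *p
	 against the corresponding adjacent 4-bit fields of *q can become
	 a single 8-bit load and compare on each side.  */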
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
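  /* E.g. (illustrative): "(x & 3) == 1 && (x & 3) == 2" can never be
     true, and "(x & 3) != 1 || (x & 3) != 2" always is.  */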
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}

/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
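  /* E.g. (illustrative): "MIN (x, 0) < 5" is handled by rewriting it as
     "!(MIN (x, 0) >= 5)", and ">=" in turn as "== || >".  */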
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}

/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
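      /* E.g. (illustrative): for unsigned x, "(x << 3) * 4" is handled by
	 rewriting "x << 3" as "x * 8" and recursing, yielding "x * 32".  */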
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
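      /* E.g. (illustrative): "(x + 7) * 4" becomes "x * 4 + 28", the
	 canonicalization described in the comment above extract_muldiv.  */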
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c, 0)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1, 0)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}

/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}


/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */
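/* For example (illustrative): for "&a[i]" this returns base "a" and
   offset "i * sizeof (*a)"; for a pointer variable "p" it returns base
   "p" and a NULL_TREE offset.  */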

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders may present us with (int *)&x.a + 4B.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  tree idx = TREE_OPERAND (op0, 1);
	  *base = TREE_OPERAND (op0, 0);
	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
				 array_ref_element_size (op0));
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}


/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this branch is a void-typed expression (e.g. it throws), it
	 does not make sense to try to perform a logical or arithmetic
	 operation involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}


/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}

/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
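
/* For instance (illustrative): "sqrt (x) > 3.0" can become "x > 9.0"
   when 9.0 is exactly representable in x's mode, and "sqrt (x) <= -1.0"
   folds to false (sqrt of a negative is NaN, and NaN compares false).  */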

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y (or == y, <= y) is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}

/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}

/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
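
/* For example (illustrative): for unsigned x, "x / 4 == 2" holds exactly
   for 8 <= x && x <= 11, so it folds to a single range check below.  */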

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}


/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
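      /* E.g. (illustrative): for a 32-bit int a, "(a & 0x80000000) != 0"
	 becomes "a < 0".  */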
6470      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6471
6472      if (arg00 != NULL_TREE
6473	  /* This is only a win if casting to a signed type is cheap,
6474	     i.e. when arg00's type is not a partial mode.  */
6475	  && TYPE_PRECISION (TREE_TYPE (arg00))
6476	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6477	{
6478	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6479	  return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6480			      result_type, fold_convert (stype, arg00),
6481			      build_int_cst (stype, 0));
6482	}
6483    }
6484
6485  return NULL_TREE;
6486}
6487
6488/* If CODE with arguments ARG0 and ARG1 represents a single bit
6489   equality/inequality test, then return a simplified form of
6490   the test using shifts and logical operations.  Otherwise return
6491   NULL.  TYPE is the desired result type.  */
6492
6493tree
6494fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6495		      tree result_type)
6496{
6497  /* If this is testing a single bit, we can optimize the test.  */
6498  if ((code == NE_EXPR || code == EQ_EXPR)
6499      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6500      && integer_pow2p (TREE_OPERAND (arg0, 1)))
6501    {
6502      tree inner = TREE_OPERAND (arg0, 0);
6503      tree type = TREE_TYPE (arg0);
6504      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6505      enum machine_mode operand_mode = TYPE_MODE (type);
6506      int ops_unsigned;
6507      tree signed_type, unsigned_type, intermediate_type;
6508      tree tem;
6509
6510      /* First, see if we can fold the single bit test into a sign-bit
6511	 test.  */
6512      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6513						 result_type);
6514      if (tem)
6515	return tem;
6516
      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1), where C2 = log2(C).
	 Similarly for (A & C) == 0.  */
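
      /* As a concrete sketch, with unsigned A:
	   (A & 0x10) != 0  becomes  (A >> 4) & 1
	   (A & 0x10) == 0  becomes  ((A >> 4) ^ 1) & 1.  */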
6520
6521      /* If INNER is a right shift of a constant and it plus BITNUM does
6522	 not overflow, adjust BITNUM and INNER.  */
6523      if (TREE_CODE (inner) == RSHIFT_EXPR
6524	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6525	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6526	  && bitnum < TYPE_PRECISION (type)
6527	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6528				   bitnum - TYPE_PRECISION (type)))
6529	{
6530	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6531	  inner = TREE_OPERAND (inner, 0);
6532	}
6533
6534      /* If we are going to be able to omit the AND below, we must do our
6535	 operations as unsigned.  If we must use the AND, we have a choice.
6536	 Normally unsigned is faster, but for some machines signed is.  */
6537#ifdef LOAD_EXTEND_OP
6538      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6539		      && !flag_syntax_only) ? 0 : 1;
6540#else
6541      ops_unsigned = 1;
6542#endif
6543
6544      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6545      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6546      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6547      inner = fold_convert (intermediate_type, inner);
6548
6549      if (bitnum != 0)
6550	inner = build2 (RSHIFT_EXPR, intermediate_type,
6551			inner, size_int (bitnum));
6552
6553      if (code == EQ_EXPR)
6554	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6555			     inner, integer_one_node);
6556
6557      /* Put the AND last so it can combine with more things.  */
6558      inner = build2 (BIT_AND_EXPR, intermediate_type,
6559		      inner, integer_one_node);
6560
6561      /* Make sure to return the proper type.  */
6562      inner = fold_convert (result_type, inner);
6563
6564      return inner;
6565    }
6566  return NULL_TREE;
6567}
6568
/* Check whether we are allowed to reorder operands ARG0 and ARG1,
   such that the evaluation of ARG1 occurs before ARG0.  */
6571
6572static bool
6573reorder_operands_p (tree arg0, tree arg1)
6574{
  if (! flag_evaluation_order)
    return true;
6577  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6578    return true;
6579  return ! TREE_SIDE_EFFECTS (arg0)
6580	 && ! TREE_SIDE_EFFECTS (arg1);
6581}
6582
/* Test whether it is preferable to swap two operands, ARG0 and
6584   ARG1, for example because ARG0 is an integer constant and ARG1
6585   isn't.  If REORDER is true, only recommend swapping if we can
6586   evaluate the operands in reverse order.  */
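
/* For example, fold_comparison uses this predicate (together with
   swap_tree_comparison) to canonicalize 5 < x into x > 5, so that
   later transformations need only look for constants in the second
   operand.  */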
6587
6588bool
6589tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6590{
6591  STRIP_SIGN_NOPS (arg0);
6592  STRIP_SIGN_NOPS (arg1);
6593
6594  if (TREE_CODE (arg1) == INTEGER_CST)
6595    return 0;
6596  if (TREE_CODE (arg0) == INTEGER_CST)
6597    return 1;
6598
6599  if (TREE_CODE (arg1) == REAL_CST)
6600    return 0;
6601  if (TREE_CODE (arg0) == REAL_CST)
6602    return 1;
6603
6604  if (TREE_CODE (arg1) == COMPLEX_CST)
6605    return 0;
6606  if (TREE_CODE (arg0) == COMPLEX_CST)
6607    return 1;
6608
6609  if (TREE_CONSTANT (arg1))
6610    return 0;
6611  if (TREE_CONSTANT (arg0))
6612    return 1;
6613
6614  if (optimize_size)
6615    return 0;
6616
6617  if (reorder && flag_evaluation_order
6618      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6619    return 0;
6620
6621  if (DECL_P (arg1))
6622    return 0;
6623  if (DECL_P (arg0))
6624    return 1;
6625
6626  /* It is preferable to swap two SSA_NAME to ensure a canonical form
6627     for commutative and comparison operators.  Ensuring a canonical
6628     form allows the optimizers to find additional redundancies without
6629     having to explicitly check for both orderings.  */
6630  if (TREE_CODE (arg0) == SSA_NAME
6631      && TREE_CODE (arg1) == SSA_NAME
6632      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6633    return 1;
6634
6635  return 0;
6636}
6637
6638/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6639   ARG0 is extended to a wider type.  */
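
/* For example, if s has type short, the comparison (int) s == 7 is
   narrowed here to s == 7, since 7 fits in the shorter type.  */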
6640
6641static tree
6642fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6643{
6644  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6645  tree arg1_unw;
6646  tree shorter_type, outer_type;
6647  tree min, max;
6648  bool above, below;
6649
6650  if (arg0_unw == arg0)
6651    return NULL_TREE;
6652  shorter_type = TREE_TYPE (arg0_unw);
6653
6654#ifdef HAVE_canonicalize_funcptr_for_compare
6655  /* Disable this optimization if we're casting a function pointer
6656     type on targets that require function pointer canonicalization.  */
6657  if (HAVE_canonicalize_funcptr_for_compare
6658      && TREE_CODE (shorter_type) == POINTER_TYPE
6659      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6660    return NULL_TREE;
6661#endif
6662
6663  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6664    return NULL_TREE;
6665
6666  arg1_unw = get_unwidened (arg1, NULL_TREE);
6667
6668  /* If possible, express the comparison in the shorter mode.  */
6669  if ((code == EQ_EXPR || code == NE_EXPR
6670       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6671      && (TREE_TYPE (arg1_unw) == shorter_type
6672	  || (TYPE_PRECISION (shorter_type)
6673	      >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6674	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6675	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6676		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6677	      && int_fits_type_p (arg1_unw, shorter_type))))
6678    return fold_build2 (code, type, arg0_unw,
6679		       fold_convert (shorter_type, arg1_unw));
6680
6681  if (TREE_CODE (arg1_unw) != INTEGER_CST
6682      || TREE_CODE (shorter_type) != INTEGER_TYPE
6683      || !int_fits_type_p (arg1_unw, shorter_type))
6684    return NULL_TREE;
6685
  /* If we are comparing with an integer whose value does not fit into
     the range of the shorter type, the result is known.  */
6688  outer_type = TREE_TYPE (arg1_unw);
6689  min = lower_bound_in_type (outer_type, shorter_type);
6690  max = upper_bound_in_type (outer_type, shorter_type);
6691
6692  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6693						   max, arg1_unw));
6694  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6695						   arg1_unw, min));
6696
6697  switch (code)
6698    {
6699    case EQ_EXPR:
6700      if (above || below)
6701	return omit_one_operand (type, integer_zero_node, arg0);
6702      break;
6703
6704    case NE_EXPR:
6705      if (above || below)
6706	return omit_one_operand (type, integer_one_node, arg0);
6707      break;
6708
    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;
6715
    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;
6722
6723    default:
6724      break;
6725    }
6726
6727  return NULL_TREE;
6728}
6729
6730/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6731   ARG0 just the signedness is changed.  */
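
/* For example, if x has type int, the equality test
     (unsigned int) x == 5U
   is folded here to x == 5; equality is unaffected by the change of
   signedness.  */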
6732
6733static tree
6734fold_sign_changed_comparison (enum tree_code code, tree type,
6735			      tree arg0, tree arg1)
6736{
6737  tree arg0_inner, tmp;
6738  tree inner_type, outer_type;
6739
6740  if (TREE_CODE (arg0) != NOP_EXPR
6741      && TREE_CODE (arg0) != CONVERT_EXPR)
6742    return NULL_TREE;
6743
6744  outer_type = TREE_TYPE (arg0);
6745  arg0_inner = TREE_OPERAND (arg0, 0);
6746  inner_type = TREE_TYPE (arg0_inner);
6747
6748#ifdef HAVE_canonicalize_funcptr_for_compare
6749  /* Disable this optimization if we're casting a function pointer
6750     type on targets that require function pointer canonicalization.  */
6751  if (HAVE_canonicalize_funcptr_for_compare
6752      && TREE_CODE (inner_type) == POINTER_TYPE
6753      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6754    return NULL_TREE;
6755#endif
6756
6757  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6758    return NULL_TREE;
6759
6760  if (TREE_CODE (arg1) != INTEGER_CST
6761      && !((TREE_CODE (arg1) == NOP_EXPR
6762	    || TREE_CODE (arg1) == CONVERT_EXPR)
6763	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6764    return NULL_TREE;
6765
6766  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6767      && code != NE_EXPR
6768      && code != EQ_EXPR)
6769    return NULL_TREE;
6770
6771  if (TREE_CODE (arg1) == INTEGER_CST)
6772    {
6773      tmp = build_int_cst_wide (inner_type,
6774				TREE_INT_CST_LOW (arg1),
6775				TREE_INT_CST_HIGH (arg1));
6776      arg1 = force_fit_type (tmp, 0,
6777			     TREE_OVERFLOW (arg1),
6778			     TREE_CONSTANT_OVERFLOW (arg1));
6779    }
6780  else
6781    arg1 = fold_convert (inner_type, arg1);
6782
6783  return fold_build2 (code, type, arg0_inner, arg1);
6784}
6785
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address, OP1 the multiplicative expression.
   If the function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  */
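
/* For example, given int a[100][100] with 4-byte int, the address
     &a[3][4] + d * 400
   is rewritten here as
     &a[3 + d][4],
   since 400 is the step of the outer array dimension.  */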
6792
6793static tree
6794try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6795{
6796  tree s, delta, step;
6797  tree ref = TREE_OPERAND (addr, 0), pref;
6798  tree ret, pos;
6799  tree itype;
6800
6801  /* Canonicalize op1 into a possibly non-constant delta
6802     and an INTEGER_CST s.  */
6803  if (TREE_CODE (op1) == MULT_EXPR)
6804    {
6805      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6806
6807      STRIP_NOPS (arg0);
6808      STRIP_NOPS (arg1);
6809
6810      if (TREE_CODE (arg0) == INTEGER_CST)
6811        {
6812          s = arg0;
6813          delta = arg1;
6814        }
6815      else if (TREE_CODE (arg1) == INTEGER_CST)
6816        {
6817          s = arg1;
6818          delta = arg0;
6819        }
6820      else
6821        return NULL_TREE;
6822    }
6823  else if (TREE_CODE (op1) == INTEGER_CST)
6824    {
6825      delta = op1;
6826      s = NULL_TREE;
6827    }
6828  else
6829    {
      /* Treat OP1 as delta * 1.  */
6831      delta = op1;
6832      s = integer_one_node;
6833    }
6834
6835  for (;; ref = TREE_OPERAND (ref, 0))
6836    {
6837      if (TREE_CODE (ref) == ARRAY_REF)
6838	{
6839	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6840	  if (! itype)
6841	    continue;
6842
6843	  step = array_ref_element_size (ref);
6844	  if (TREE_CODE (step) != INTEGER_CST)
6845	    continue;
6846
6847	  if (s)
6848	    {
6849	      if (! tree_int_cst_equal (step, s))
6850                continue;
6851	    }
6852	  else
6853	    {
	      /* Check whether delta is a multiple of step.  */
6855	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6856	      if (! tmp)
6857		continue;
6858	      delta = tmp;
6859	    }
6860
6861	  break;
6862	}
6863
6864      if (!handled_component_p (ref))
6865	return NULL_TREE;
6866    }
6867
  /* We found a suitable array reference.  Copy everything up to it,
     and replace the index.  */
6870
6871  pref = TREE_OPERAND (addr, 0);
6872  ret = copy_node (pref);
6873  pos = ret;
6874
6875  while (pref != ref)
6876    {
6877      pref = TREE_OPERAND (pref, 0);
6878      TREE_OPERAND (pos, 0) = copy_node (pref);
6879      pos = TREE_OPERAND (pos, 0);
6880    }
6881
6882  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6883				       fold_convert (itype,
6884						     TREE_OPERAND (pos, 1)),
6885				       fold_convert (itype, delta));
6886
6887  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6888}
6889
6890
6891/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6892   means A >= Y && A != MAX, but in this case we know that
6893   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
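
/* For example, in
     if (a < n && a + 1 > m)
   the second test is folded here to a >= m: the first test already
   guarantees that a + 1 cannot wrap around.  */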
6894
6895static tree
6896fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6897{
6898  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6899
6900  if (TREE_CODE (bound) == LT_EXPR)
6901    a = TREE_OPERAND (bound, 0);
6902  else if (TREE_CODE (bound) == GT_EXPR)
6903    a = TREE_OPERAND (bound, 1);
6904  else
6905    return NULL_TREE;
6906
6907  typea = TREE_TYPE (a);
6908  if (!INTEGRAL_TYPE_P (typea)
6909      && !POINTER_TYPE_P (typea))
6910    return NULL_TREE;
6911
6912  if (TREE_CODE (ineq) == LT_EXPR)
6913    {
6914      a1 = TREE_OPERAND (ineq, 1);
6915      y = TREE_OPERAND (ineq, 0);
6916    }
6917  else if (TREE_CODE (ineq) == GT_EXPR)
6918    {
6919      a1 = TREE_OPERAND (ineq, 0);
6920      y = TREE_OPERAND (ineq, 1);
6921    }
6922  else
6923    return NULL_TREE;
6924
6925  if (TREE_TYPE (a1) != typea)
6926    return NULL_TREE;
6927
6928  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6929  if (!integer_onep (diff))
6930    return NULL_TREE;
6931
6932  return fold_build2 (GE_EXPR, type, a, y);
6933}
6934
6935/* Fold a sum or difference of at least one multiplication.
6936   Returns the folded tree or NULL if no simplification could be made.  */
6937
6938static tree
6939fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6940{
6941  tree arg00, arg01, arg10, arg11;
6942  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6943
6944  /* (A * C) +- (B * C) -> (A+-B) * C.
6945     (A * C) +- A -> A * (C+-1).
6946     We are most concerned about the case where C is a constant,
6947     but other combinations show up during loop reduction.  Since
6948     it is not difficult, try all four possibilities.  */
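
  /* For example:
       x * 12 + y * 12  ->  (x + y) * 12
       x * 12 - x       ->  x * 11
       x * 12 + y * 4   ->  (x * 3 + y) * 4
     where the last form uses the power-of-two factoring handled
     below.  */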
6949
6950  if (TREE_CODE (arg0) == MULT_EXPR)
6951    {
6952      arg00 = TREE_OPERAND (arg0, 0);
6953      arg01 = TREE_OPERAND (arg0, 1);
6954    }
6955  else
6956    {
6957      arg00 = arg0;
6958      arg01 = build_one_cst (type);
6959    }
6960  if (TREE_CODE (arg1) == MULT_EXPR)
6961    {
6962      arg10 = TREE_OPERAND (arg1, 0);
6963      arg11 = TREE_OPERAND (arg1, 1);
6964    }
6965  else
6966    {
6967      arg10 = arg1;
6968      arg11 = build_one_cst (type);
6969    }
6970  same = NULL_TREE;
6971
6972  if (operand_equal_p (arg01, arg11, 0))
6973    same = arg01, alt0 = arg00, alt1 = arg10;
6974  else if (operand_equal_p (arg00, arg10, 0))
6975    same = arg00, alt0 = arg01, alt1 = arg11;
6976  else if (operand_equal_p (arg00, arg11, 0))
6977    same = arg00, alt0 = arg01, alt1 = arg10;
6978  else if (operand_equal_p (arg01, arg10, 0))
6979    same = arg01, alt0 = arg00, alt1 = arg11;
6980
6981  /* No identical multiplicands; see if we can find a common
6982     power-of-two factor in non-power-of-two multiplies.  This
6983     can help in multi-dimensional array access.  */
6984  else if (host_integerp (arg01, 0)
6985	   && host_integerp (arg11, 0))
6986    {
6987      HOST_WIDE_INT int01, int11, tmp;
6988      bool swap = false;
6989      tree maybe_same;
6990      int01 = TREE_INT_CST_LOW (arg01);
6991      int11 = TREE_INT_CST_LOW (arg11);
6992
6993      /* Move min of absolute values to int11.  */
6994      if ((int01 >= 0 ? int01 : -int01)
6995	  < (int11 >= 0 ? int11 : -int11))
6996        {
6997	  tmp = int01, int01 = int11, int11 = tmp;
6998	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
6999	  maybe_same = arg01;
7000	  swap = true;
7001	}
7002      else
7003	maybe_same = arg11;
7004
7005      if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7006        {
7007	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7008			      build_int_cst (TREE_TYPE (arg00),
7009					     int01 / int11));
7010	  alt1 = arg10;
7011	  same = maybe_same;
7012	  if (swap)
7013	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7014	}
7015    }
7016
7017  if (same)
7018    return fold_build2 (MULT_EXPR, type,
7019			fold_build2 (code, type,
7020				     fold_convert (type, alt0),
7021				     fold_convert (type, alt1)),
7022			fold_convert (type, same));
7023
7024  return NULL_TREE;
7025}
7026
7027/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7028   specified by EXPR into the buffer PTR of length LEN bytes.
7029   Return the number of bytes placed in the buffer, or zero
7030   upon failure.  */
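
/* For example, encoding the 32-bit constant 0x01020304 stores the
   bytes 04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian target.  */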
7031
7032static int
7033native_encode_int (tree expr, unsigned char *ptr, int len)
7034{
7035  tree type = TREE_TYPE (expr);
7036  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7037  int byte, offset, word, words;
7038  unsigned char value;
7039
7040  if (total_bytes > len)
7041    return 0;
7042  words = total_bytes / UNITS_PER_WORD;
7043
7044  for (byte = 0; byte < total_bytes; byte++)
7045    {
7046      int bitpos = byte * BITS_PER_UNIT;
7047      if (bitpos < HOST_BITS_PER_WIDE_INT)
7048	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7049      else
7050	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7051				 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7052
7053      if (total_bytes > UNITS_PER_WORD)
7054	{
7055	  word = byte / UNITS_PER_WORD;
7056	  if (WORDS_BIG_ENDIAN)
7057	    word = (words - 1) - word;
7058	  offset = word * UNITS_PER_WORD;
7059	  if (BYTES_BIG_ENDIAN)
7060	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7061	  else
7062	    offset += byte % UNITS_PER_WORD;
7063	}
7064      else
7065	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7066      ptr[offset] = value;
7067    }
7068  return total_bytes;
7069}
7070
7071
7072/* Subroutine of native_encode_expr.  Encode the REAL_CST
7073   specified by EXPR into the buffer PTR of length LEN bytes.
7074   Return the number of bytes placed in the buffer, or zero
7075   upon failure.  */
7076
7077static int
7078native_encode_real (tree expr, unsigned char *ptr, int len)
7079{
7080  tree type = TREE_TYPE (expr);
7081  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7082  int byte, offset, word, words, bitpos;
7083  unsigned char value;
7084
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7088  long tmp[6];
7089
7090  if (total_bytes > len)
7091    return 0;
7092  words = 32 / UNITS_PER_WORD;
7093
7094  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7095
7096  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7097       bitpos += BITS_PER_UNIT)
7098    {
7099      byte = (bitpos / BITS_PER_UNIT) & 3;
7100      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7101
7102      if (UNITS_PER_WORD < 4)
7103	{
7104	  word = byte / UNITS_PER_WORD;
7105	  if (WORDS_BIG_ENDIAN)
7106	    word = (words - 1) - word;
7107	  offset = word * UNITS_PER_WORD;
7108	  if (BYTES_BIG_ENDIAN)
7109	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7110	  else
7111	    offset += byte % UNITS_PER_WORD;
7112	}
7113      else
7114	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7115      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7116    }
7117  return total_bytes;
7118}
7119
7120/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7121   specified by EXPR into the buffer PTR of length LEN bytes.
7122   Return the number of bytes placed in the buffer, or zero
7123   upon failure.  */
7124
7125static int
7126native_encode_complex (tree expr, unsigned char *ptr, int len)
7127{
7128  int rsize, isize;
7129  tree part;
7130
7131  part = TREE_REALPART (expr);
7132  rsize = native_encode_expr (part, ptr, len);
7133  if (rsize == 0)
7134    return 0;
7135  part = TREE_IMAGPART (expr);
7136  isize = native_encode_expr (part, ptr+rsize, len-rsize);
7137  if (isize != rsize)
7138    return 0;
7139  return rsize + isize;
7140}
7141
7142
7143/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7144   specified by EXPR into the buffer PTR of length LEN bytes.
7145   Return the number of bytes placed in the buffer, or zero
7146   upon failure.  */
7147
7148static int
7149native_encode_vector (tree expr, unsigned char *ptr, int len)
7150{
7151  int i, size, offset, count;
7152  tree itype, elem, elements;
7153
7154  offset = 0;
7155  elements = TREE_VECTOR_CST_ELTS (expr);
7156  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7157  itype = TREE_TYPE (TREE_TYPE (expr));
7158  size = GET_MODE_SIZE (TYPE_MODE (itype));
7159  for (i = 0; i < count; i++)
7160    {
7161      if (elements)
7162	{
7163	  elem = TREE_VALUE (elements);
7164	  elements = TREE_CHAIN (elements);
7165	}
7166      else
7167	elem = NULL_TREE;
7168
7169      if (elem)
7170	{
7171	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7172	    return 0;
7173	}
7174      else
7175	{
7176	  if (offset + size > len)
7177	    return 0;
7178	  memset (ptr+offset, 0, size);
7179	}
7180      offset += size;
7181    }
7182  return offset;
7183}
7184
7185
7186/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7187   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7188   buffer PTR of length LEN bytes.  Return the number of bytes
7189   placed in the buffer, or zero upon failure.  */
7190
7191static int
7192native_encode_expr (tree expr, unsigned char *ptr, int len)
7193{
7194  switch (TREE_CODE (expr))
7195    {
7196    case INTEGER_CST:
7197      return native_encode_int (expr, ptr, len);
7198
7199    case REAL_CST:
7200      return native_encode_real (expr, ptr, len);
7201
7202    case COMPLEX_CST:
7203      return native_encode_complex (expr, ptr, len);
7204
7205    case VECTOR_CST:
7206      return native_encode_vector (expr, ptr, len);
7207
7208    default:
7209      return 0;
7210    }
7211}
7212
7213
7214/* Subroutine of native_interpret_expr.  Interpret the contents of
7215   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7216   If the buffer cannot be interpreted, return NULL_TREE.  */
7217
7218static tree
7219native_interpret_int (tree type, unsigned char *ptr, int len)
7220{
7221  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7222  int byte, offset, word, words;
7223  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
7225  HOST_WIDE_INT hi = 0;
7226
7227  if (total_bytes > len)
7228    return NULL_TREE;
7229  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7230    return NULL_TREE;
7231  words = total_bytes / UNITS_PER_WORD;
7232
7233  for (byte = 0; byte < total_bytes; byte++)
7234    {
7235      int bitpos = byte * BITS_PER_UNIT;
7236      if (total_bytes > UNITS_PER_WORD)
7237	{
7238	  word = byte / UNITS_PER_WORD;
7239	  if (WORDS_BIG_ENDIAN)
7240	    word = (words - 1) - word;
7241	  offset = word * UNITS_PER_WORD;
7242	  if (BYTES_BIG_ENDIAN)
7243	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7244	  else
7245	    offset += byte % UNITS_PER_WORD;
7246	}
7247      else
7248	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7249      value = ptr[offset];
7250
7251      if (bitpos < HOST_BITS_PER_WIDE_INT)
7252	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7253      else
7254	hi |= (unsigned HOST_WIDE_INT) value
7255	      << (bitpos - HOST_BITS_PER_WIDE_INT);
7256    }
7257
7258  return force_fit_type (build_int_cst_wide (type, lo, hi),
7259			 0, false, false);
7260}
7261
7262
7263/* Subroutine of native_interpret_expr.  Interpret the contents of
7264   the buffer PTR of length LEN as a REAL_CST of type TYPE.
7265   If the buffer cannot be interpreted, return NULL_TREE.  */
7266
7267static tree
7268native_interpret_real (tree type, unsigned char *ptr, int len)
7269{
7270  enum machine_mode mode = TYPE_MODE (type);
7271  int total_bytes = GET_MODE_SIZE (mode);
7272  int byte, offset, word, words, bitpos;
7273  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7277  REAL_VALUE_TYPE r;
7278  long tmp[6];
7279
  if (total_bytes > len || total_bytes > 24)
7282    return NULL_TREE;
7283  words = 32 / UNITS_PER_WORD;
7284
7285  memset (tmp, 0, sizeof (tmp));
7286  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7287       bitpos += BITS_PER_UNIT)
7288    {
7289      byte = (bitpos / BITS_PER_UNIT) & 3;
7290      if (UNITS_PER_WORD < 4)
7291	{
7292	  word = byte / UNITS_PER_WORD;
7293	  if (WORDS_BIG_ENDIAN)
7294	    word = (words - 1) - word;
7295	  offset = word * UNITS_PER_WORD;
7296	  if (BYTES_BIG_ENDIAN)
7297	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7298	  else
7299	    offset += byte % UNITS_PER_WORD;
7300	}
7301      else
7302	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7303      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7304
7305      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7306    }
7307
7308  real_from_target (&r, tmp, mode);
7309  return build_real (type, r);
7310}
7311
7312
7313/* Subroutine of native_interpret_expr.  Interpret the contents of
7314   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7315   If the buffer cannot be interpreted, return NULL_TREE.  */
7316
7317static tree
7318native_interpret_complex (tree type, unsigned char *ptr, int len)
7319{
7320  tree etype, rpart, ipart;
7321  int size;
7322
7323  etype = TREE_TYPE (type);
7324  size = GET_MODE_SIZE (TYPE_MODE (etype));
7325  if (size * 2 > len)
7326    return NULL_TREE;
7327  rpart = native_interpret_expr (etype, ptr, size);
7328  if (!rpart)
7329    return NULL_TREE;
7330  ipart = native_interpret_expr (etype, ptr+size, size);
7331  if (!ipart)
7332    return NULL_TREE;
7333  return build_complex (type, rpart, ipart);
7334}
7335
7336
7337/* Subroutine of native_interpret_expr.  Interpret the contents of
7338   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7339   If the buffer cannot be interpreted, return NULL_TREE.  */
7340
7341static tree
7342native_interpret_vector (tree type, unsigned char *ptr, int len)
7343{
7344  tree etype, elem, elements;
7345  int i, size, count;
7346
7347  etype = TREE_TYPE (type);
7348  size = GET_MODE_SIZE (TYPE_MODE (etype));
7349  count = TYPE_VECTOR_SUBPARTS (type);
7350  if (size * count > len)
7351    return NULL_TREE;
7352
7353  elements = NULL_TREE;
7354  for (i = count - 1; i >= 0; i--)
7355    {
7356      elem = native_interpret_expr (etype, ptr+(i*size), size);
7357      if (!elem)
7358	return NULL_TREE;
7359      elements = tree_cons (NULL_TREE, elem, elements);
7360    }
7361  return build_vector (type, elements);
7362}
7363
7364
7365/* Subroutine of fold_view_convert_expr.  Interpret the contents of
7366   the buffer PTR of length LEN as a constant of type TYPE.  For
7367   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7368   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7369   return NULL_TREE.  */
7370
7371static tree
7372native_interpret_expr (tree type, unsigned char *ptr, int len)
7373{
7374  switch (TREE_CODE (type))
7375    {
7376    case INTEGER_TYPE:
7377    case ENUMERAL_TYPE:
7378    case BOOLEAN_TYPE:
7379      return native_interpret_int (type, ptr, len);
7380
7381    case REAL_TYPE:
7382      return native_interpret_real (type, ptr, len);
7383
7384    case COMPLEX_TYPE:
7385      return native_interpret_complex (type, ptr, len);
7386
7387    case VECTOR_TYPE:
7388      return native_interpret_vector (type, ptr, len);
7389
7390    default:
7391      return NULL_TREE;
7392    }
7393}
7394
7395
7396/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7397   TYPE at compile-time.  If we're unable to perform the conversion
7398   return NULL_TREE.  */
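
/* For example, assuming IEEE single precision floats,
     VIEW_CONVERT_EXPR<int>(1.0f)
   is folded here to the INTEGER_CST 0x3f800000: the REAL_CST is
   encoded into a byte buffer and the bytes are re-interpreted in the
   target type.  */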
7399
7400static tree
7401fold_view_convert_expr (tree type, tree expr)
7402{
7403  /* We support up to 512-bit values (for V8DFmode).  */
7404  unsigned char buffer[64];
7405  int len;
7406
7407  /* Check that the host and target are sane.  */
7408  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7409    return NULL_TREE;
7410
7411  len = native_encode_expr (expr, buffer, sizeof (buffer));
7412  if (len == 0)
7413    return NULL_TREE;
7414
7415  return native_interpret_expr (type, buffer, len);
7416}
7417
7418
7419/* Fold a unary expression of code CODE and type TYPE with operand
7420   OP0.  Return the folded expression if folding is successful.
7421   Otherwise, return NULL_TREE.  */
7422
7423tree
7424fold_unary (enum tree_code code, tree type, tree op0)
7425{
7426  tree tem;
7427  tree arg0;
7428  enum tree_code_class kind = TREE_CODE_CLASS (code);
7429
7430  gcc_assert (IS_EXPR_CODE_CLASS (kind)
7431	      && TREE_CODE_LENGTH (code) == 1);
7432
7433  arg0 = op0;
7434  if (arg0)
7435    {
7436      if (code == NOP_EXPR || code == CONVERT_EXPR
7437	  || code == FLOAT_EXPR || code == ABS_EXPR)
7438	{
7439	  /* Don't use STRIP_NOPS, because signedness of argument type
7440	     matters.  */
7441	  STRIP_SIGN_NOPS (arg0);
7442	}
7443      else
7444	{
7445	  /* Strip any conversions that don't change the mode.  This
7446	     is safe for every expression, except for a comparison
7447	     expression because its signedness is derived from its
7448	     operands.
7449
7450	     Note that this is done as an internal manipulation within
7451	     the constant folder, in order to find the simplest
7452	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
7454	     should be put back in the tree that will get out of the
7455	     constant folder.  */
7456	  STRIP_NOPS (arg0);
7457	}
7458    }
7459
7460  if (TREE_CODE_CLASS (code) == tcc_unary)
7461    {
7462      if (TREE_CODE (arg0) == COMPOUND_EXPR)
7463	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7464		       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7465      else if (TREE_CODE (arg0) == COND_EXPR)
7466	{
7467	  tree arg01 = TREE_OPERAND (arg0, 1);
7468	  tree arg02 = TREE_OPERAND (arg0, 2);
7469	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7470	    arg01 = fold_build1 (code, type, arg01);
7471	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7472	    arg02 = fold_build1 (code, type, arg02);
7473	  tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7474			     arg01, arg02);
7475
	  /* If this was a conversion, and all we did was move it
	     inside the COND_EXPR, bring it back out.  But leave it if
7478	     it is a conversion from integer to integer and the
7479	     result precision is no wider than a word since such a
7480	     conversion is cheap and may be optimized away by combine,
7481	     while it couldn't if it were outside the COND_EXPR.  Then return
7482	     so we don't get into an infinite recursion loop taking the
7483	     conversion out and then back in.  */
7484
7485	  if ((code == NOP_EXPR || code == CONVERT_EXPR
7486	       || code == NON_LVALUE_EXPR)
7487	      && TREE_CODE (tem) == COND_EXPR
7488	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7489	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7490	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7491	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7492	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7493		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7494	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7495		     && (INTEGRAL_TYPE_P
7496			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7497		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7498		  || flag_syntax_only))
7499	    tem = build1 (code, type,
7500			  build3 (COND_EXPR,
7501				  TREE_TYPE (TREE_OPERAND
7502					     (TREE_OPERAND (tem, 1), 0)),
7503				  TREE_OPERAND (tem, 0),
7504				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7505				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7506	  return tem;
7507	}
7508      else if (COMPARISON_CLASS_P (arg0))
7509	{
7510	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7511	    {
7512	      arg0 = copy_node (arg0);
7513	      TREE_TYPE (arg0) = type;
7514	      return arg0;
7515	    }
7516	  else if (TREE_CODE (type) != INTEGER_TYPE)
7517	    return fold_build3 (COND_EXPR, type, arg0,
7518				fold_build1 (code, type,
7519					     integer_one_node),
7520				fold_build1 (code, type,
7521					     integer_zero_node));
7522	}
7523   }
7524
7525  switch (code)
7526    {
7527    case NOP_EXPR:
7528    case FLOAT_EXPR:
7529    case CONVERT_EXPR:
7530    case FIX_TRUNC_EXPR:
7531    case FIX_CEIL_EXPR:
7532    case FIX_FLOOR_EXPR:
7533    case FIX_ROUND_EXPR:
7534      if (TREE_TYPE (op0) == type)
7535	return op0;
7536
      /* If we have (type) (a CMP b) and type is an integral type, return
	 a new expression involving the new type.  */
7539      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7540	return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7541			    TREE_OPERAND (op0, 1));
7542
7543      /* Handle cases of two conversions in a row.  */
7544      if (TREE_CODE (op0) == NOP_EXPR
7545	  || TREE_CODE (op0) == CONVERT_EXPR)
7546	{
7547	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7548	  tree inter_type = TREE_TYPE (op0);
7549	  int inside_int = INTEGRAL_TYPE_P (inside_type);
7550	  int inside_ptr = POINTER_TYPE_P (inside_type);
7551	  int inside_float = FLOAT_TYPE_P (inside_type);
7552	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7553	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
7554	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7555	  int inter_int = INTEGRAL_TYPE_P (inter_type);
7556	  int inter_ptr = POINTER_TYPE_P (inter_type);
7557	  int inter_float = FLOAT_TYPE_P (inter_type);
7558	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7559	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
7560	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7561	  int final_int = INTEGRAL_TYPE_P (type);
7562	  int final_ptr = POINTER_TYPE_P (type);
7563	  int final_float = FLOAT_TYPE_P (type);
7564	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7565	  unsigned int final_prec = TYPE_PRECISION (type);
7566	  int final_unsignedp = TYPE_UNSIGNED (type);
7567
7568	  /* In addition to the cases of two conversions in a row
7569	     handled below, if we are converting something to its own
7570	     type via an object of identical or wider precision, neither
7571	     conversion is needed.  */
7572	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7573	      && (((inter_int || inter_ptr) && final_int)
7574		  || (inter_float && final_float))
7575	      && inter_prec >= final_prec)
7576	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7577
7578	  /* Likewise, if the intermediate and final types are either both
7579	     float or both integer, we don't need the middle conversion if
7580	     it is wider than the final type and doesn't change the signedness
7581	     (for integers).  Avoid this if the final type is a pointer
7582	     since then we sometimes need the inner conversion.  Likewise if
7583	     the outer has a precision not equal to the size of its mode.  */
7584	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7585	       || (inter_float && inside_float)
7586	       || (inter_vec && inside_vec))
7587	      && inter_prec >= inside_prec
7588	      && (inter_float || inter_vec
7589		  || inter_unsignedp == inside_unsignedp)
7590	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7591		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7592	      && ! final_ptr
7593	      && (! final_vec || inter_prec == inside_prec))
7594	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7595
7596	  /* If we have a sign-extension of a zero-extended value, we can
7597	     replace that by a single zero-extension.  */
7598	  if (inside_int && inter_int && final_int
7599	      && inside_prec < inter_prec && inter_prec < final_prec
7600	      && inside_unsignedp && !inter_unsignedp)
7601	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7602
	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ, or
	     - the final type is a pointer type and the initial type is
	       not, or
	     - the initial type is a pointer to an array and the final type
	       is not.  */
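
	  /* For example, with 16-bit short and 32-bit int,
	     (char) (int) s for a short s folds to the single
	     conversion (char) s: none of the conditions above
	     hold.  */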
7617	  /* Java pointer type conversions generate checks in some
7618	     cases, so we explicitly disallow this optimization.  */
7619	  if (! inside_float && ! inter_float && ! final_float
7620	      && ! inside_vec && ! inter_vec && ! final_vec
7621	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
7622	      && ! (inside_int && inter_int
7623		    && inter_unsignedp != inside_unsignedp
7624		    && inter_prec < final_prec)
7625	      && ((inter_unsignedp && inter_prec > inside_prec)
7626		  == (final_unsignedp && final_prec > inter_prec))
7627	      && ! (inside_ptr && inter_prec != final_prec)
7628	      && ! (final_ptr && inside_prec != inter_prec)
7629	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7630		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7631	      && final_ptr == inside_ptr
7632	      && ! (inside_ptr
7633		    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7634		    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7635	      && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7636		    && final_ptr))
7637	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7638	}
7639
7640      /* Handle (T *)&A.B.C for A being of type T and B and C
7641	 living at offset zero.  This occurs frequently in
7642	 C++ upcasting and then accessing the base.  */
7643      if (TREE_CODE (op0) == ADDR_EXPR
7644	  && POINTER_TYPE_P (type)
7645	  && handled_component_p (TREE_OPERAND (op0, 0)))
7646        {
7647	  HOST_WIDE_INT bitsize, bitpos;
7648	  tree offset;
7649	  enum machine_mode mode;
7650	  int unsignedp, volatilep;
7651          tree base = TREE_OPERAND (op0, 0);
7652	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7653				      &mode, &unsignedp, &volatilep, false);
7654	  /* If the reference was to a (constant) zero offset, we can use
7655	     the address of the base if it has the same base type
7656	     as the result type.  */
7657	  if (! offset && bitpos == 0
7658	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7659		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7660	    return fold_convert (type, build_fold_addr_expr (base));
7661        }
7662
7663      if (TREE_CODE (op0) == MODIFY_EXPR
7664	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7665	  /* Detect assigning a bitfield.  */
7666	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7667	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7668	{
7669	  /* Don't leave an assignment inside a conversion
7670	     unless assigning a bitfield.  */
7671	  tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7672	  /* First do the assignment, then return converted constant.  */
7673	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7674	  TREE_NO_WARNING (tem) = 1;
7675	  TREE_USED (tem) = 1;
7676	  return tem;
7677	}
7678
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds the extension into the BIT_AND_EXPR.  */
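      /* For example, (unsigned long) (x & 0xff) with unsigned int x
	 folds to (unsigned long) x & 0xff.  */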
7682      if (INTEGRAL_TYPE_P (type)
7683	  && TREE_CODE (type) != BOOLEAN_TYPE
7684	  && TREE_CODE (op0) == BIT_AND_EXPR
7685	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7686	{
7687	  tree and = op0;
7688	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7689	  int change = 0;
7690
7691	  if (TYPE_UNSIGNED (TREE_TYPE (and))
7692	      || (TYPE_PRECISION (type)
7693		  <= TYPE_PRECISION (TREE_TYPE (and))))
7694	    change = 1;
7695	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7696		   <= HOST_BITS_PER_WIDE_INT
7697		   && host_integerp (and1, 1))
7698	    {
7699	      unsigned HOST_WIDE_INT cst;
7700
7701	      cst = tree_low_cst (and1, 1);
7702	      cst &= (HOST_WIDE_INT) -1
7703		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7704	      change = (cst == 0);
7705#ifdef LOAD_EXTEND_OP
7706	      if (change
7707		  && !flag_syntax_only
7708		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7709		      == ZERO_EXTEND))
7710		{
7711		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7712		  and0 = fold_convert (uns, and0);
7713		  and1 = fold_convert (uns, and1);
7714		}
7715#endif
7716	    }
7717	  if (change)
7718	    {
7719	      tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7720					TREE_INT_CST_HIGH (and1));
7721	      tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7722				    TREE_CONSTANT_OVERFLOW (and1));
7723	      return fold_build2 (BIT_AND_EXPR, type,
7724				  fold_convert (type, and0), tem);
7725	    }
7726	}
7727
7728      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7729	 T2 being pointers to types of the same size.  */
7730      if (POINTER_TYPE_P (type)
7731	  && BINARY_CLASS_P (arg0)
7732	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7733	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7734	{
7735	  tree arg00 = TREE_OPERAND (arg0, 0);
7736	  tree t0 = type;
7737	  tree t1 = TREE_TYPE (arg00);
7738	  tree tt0 = TREE_TYPE (t0);
7739	  tree tt1 = TREE_TYPE (t1);
7740	  tree s0 = TYPE_SIZE (tt0);
7741	  tree s1 = TYPE_SIZE (tt1);
7742
7743	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7744	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7745			   TREE_OPERAND (arg0, 1));
7746	}
7747
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7751      if (INTEGRAL_TYPE_P (type)
7752	  && TREE_CODE (op0) == BIT_NOT_EXPR
7753	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7754	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7755	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7756	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7757	{
7758	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7759	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7760	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7761	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7762	}
7763
7764      tem = fold_convert_const (code, type, op0);
7765      return tem ? tem : NULL_TREE;
7766
7767    case VIEW_CONVERT_EXPR:
7768      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7769	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7770      return fold_view_convert_expr (type, op0);
7771
7772    case NEGATE_EXPR:
7773      tem = fold_negate_expr (arg0);
7774      if (tem)
7775	return fold_convert (type, tem);
7776      return NULL_TREE;
7777
7778    case ABS_EXPR:
7779      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7780	return fold_abs_const (arg0, type);
7781      else if (TREE_CODE (arg0) == NEGATE_EXPR)
7782	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7783      /* Convert fabs((double)float) into (double)fabsf(float).  */
7784      else if (TREE_CODE (arg0) == NOP_EXPR
7785	       && TREE_CODE (type) == REAL_TYPE)
7786	{
7787	  tree targ0 = strip_float_extensions (arg0);
7788	  if (targ0 != arg0)
7789	    return fold_convert (type, fold_build1 (ABS_EXPR,
7790						    TREE_TYPE (targ0),
7791						    targ0));
7792	}
7793      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
7794      else if (TREE_CODE (arg0) == ABS_EXPR)
7795	return arg0;
7796      else if (tree_expr_nonnegative_p (arg0))
7797	return arg0;
7798
7799      /* Strip sign ops from argument.  */
7800      if (TREE_CODE (type) == REAL_TYPE)
7801	{
7802	  tem = fold_strip_sign_ops (arg0);
7803	  if (tem)
7804	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7805	}
7806      return NULL_TREE;
7807
7808    case CONJ_EXPR:
7809      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7810	return fold_convert (type, arg0);
7811      if (TREE_CODE (arg0) == COMPLEX_EXPR)
7812	{
7813	  tree itype = TREE_TYPE (type);
7814	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7815	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7816	  return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7817	}
7818      if (TREE_CODE (arg0) == COMPLEX_CST)
7819	{
7820	  tree itype = TREE_TYPE (type);
7821	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7822	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7823	  return build_complex (type, rpart, negate_expr (ipart));
7824	}
7825      if (TREE_CODE (arg0) == CONJ_EXPR)
7826	return fold_convert (type, TREE_OPERAND (arg0, 0));
7827      return NULL_TREE;
7828
7829    case BIT_NOT_EXPR:
7830      if (TREE_CODE (arg0) == INTEGER_CST)
7831        return fold_not_const (arg0, type);
7832      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7833	return TREE_OPERAND (arg0, 0);
7834      /* Convert ~ (-A) to A - 1.  */
7835      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7836	return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7837			    build_int_cst (type, 1));
7838      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
7839      else if (INTEGRAL_TYPE_P (type)
7840	       && ((TREE_CODE (arg0) == MINUS_EXPR
7841		    && integer_onep (TREE_OPERAND (arg0, 1)))
7842		   || (TREE_CODE (arg0) == PLUS_EXPR
7843		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7844	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7845      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
7846      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7847	       && (tem = fold_unary (BIT_NOT_EXPR, type,
7848			       	     fold_convert (type,
7849					     	   TREE_OPERAND (arg0, 0)))))
7850	return fold_build2 (BIT_XOR_EXPR, type, tem,
7851			    fold_convert (type, TREE_OPERAND (arg0, 1)));
7852      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7853	       && (tem = fold_unary (BIT_NOT_EXPR, type,
7854			       	     fold_convert (type,
7855					     	   TREE_OPERAND (arg0, 1)))))
7856	return fold_build2 (BIT_XOR_EXPR, type,
7857			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7858
7859      return NULL_TREE;
7860
7861    case TRUTH_NOT_EXPR:
7862      /* The argument to invert_truthvalue must have Boolean type.  */
7863      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7864          arg0 = fold_convert (boolean_type_node, arg0);
7865
      /* Note that the operand of this must be an int
	 and its value must be 0 or 1.
	 ("true" is a fixed value, perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
7870      tem = fold_truth_not_expr (arg0);
7871      if (!tem)
7872	return NULL_TREE;
7873      return fold_convert (type, tem);
7874
7875    case REALPART_EXPR:
7876      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7877	return fold_convert (type, arg0);
7878      if (TREE_CODE (arg0) == COMPLEX_EXPR)
7879	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7880				 TREE_OPERAND (arg0, 1));
7881      if (TREE_CODE (arg0) == COMPLEX_CST)
7882	return fold_convert (type, TREE_REALPART (arg0));
7883      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7884	{
7885	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7886	  tem = fold_build2 (TREE_CODE (arg0), itype,
7887			     fold_build1 (REALPART_EXPR, itype,
7888					  TREE_OPERAND (arg0, 0)),
7889			     fold_build1 (REALPART_EXPR, itype,
7890					  TREE_OPERAND (arg0, 1)));
7891	  return fold_convert (type, tem);
7892	}
7893      if (TREE_CODE (arg0) == CONJ_EXPR)
7894	{
7895	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7896	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7897	  return fold_convert (type, tem);
7898	}
7899      return NULL_TREE;
7900
7901    case IMAGPART_EXPR:
7902      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7903	return fold_convert (type, integer_zero_node);
7904      if (TREE_CODE (arg0) == COMPLEX_EXPR)
7905	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7906				 TREE_OPERAND (arg0, 0));
7907      if (TREE_CODE (arg0) == COMPLEX_CST)
7908	return fold_convert (type, TREE_IMAGPART (arg0));
7909      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7910	{
7911	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7912	  tem = fold_build2 (TREE_CODE (arg0), itype,
7913			     fold_build1 (IMAGPART_EXPR, itype,
7914					  TREE_OPERAND (arg0, 0)),
7915			     fold_build1 (IMAGPART_EXPR, itype,
7916					  TREE_OPERAND (arg0, 1)));
7917	  return fold_convert (type, tem);
7918	}
7919      if (TREE_CODE (arg0) == CONJ_EXPR)
7920	{
7921	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7922	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7923	  return fold_convert (type, negate_expr (tem));
7924	}
7925      return NULL_TREE;
7926
7927    default:
7928      return NULL_TREE;
7929    } /* switch (code) */
7930}
7931
7932/* Fold a binary expression of code CODE and type TYPE with operands
7933   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7934   Return the folded expression if folding is successful.  Otherwise,
7935   return NULL_TREE.  */
7936
7937static tree
7938fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7939{
7940  enum tree_code compl_code;
7941
7942  if (code == MIN_EXPR)
7943    compl_code = MAX_EXPR;
7944  else if (code == MAX_EXPR)
7945    compl_code = MIN_EXPR;
7946  else
7947    gcc_unreachable ();
7948
7949  /* MIN (MAX (a, b), b) == b.  */
7950  if (TREE_CODE (op0) == compl_code
7951      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7952    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7953
7954  /* MIN (MAX (b, a), b) == b.  */
7955  if (TREE_CODE (op0) == compl_code
7956      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7957      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7958    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7959
7960  /* MIN (a, MAX (a, b)) == a.  */
7961  if (TREE_CODE (op1) == compl_code
7962      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7963      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7964    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7965
7966  /* MIN (a, MAX (b, a)) == a.  */
7967  if (TREE_CODE (op1) == compl_code
7968      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7969      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7970    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7971
7972  return NULL_TREE;
7973}
7974
7975/* Subroutine of fold_binary.  This routine performs all of the
7976   transformations that are common to the equality/inequality
7977   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should go through fold_binary rather than calling this
   function directly.  Fold a comparison with tree code CODE and type
   TYPE with operands OP0 and OP1.  Return the folded comparison or
   NULL_TREE.  */
7982
7983static tree
7984fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7985{
7986  tree arg0, arg1, tem;
7987
7988  arg0 = op0;
7989  arg1 = op1;
7990
7991  STRIP_SIGN_NOPS (arg0);
7992  STRIP_SIGN_NOPS (arg1);
7993
7994  tem = fold_relational_const (code, type, arg0, arg1);
7995  if (tem != NULL_TREE)
7996    return tem;
7997
7998  /* If one arg is a real or integer constant, put it last.  */
7999  if (tree_swap_operands_p (arg0, arg1, true))
8000    return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8001
8002  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
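  /* For instance, x + 5 < 10 becomes x < 5 when signed overflow is
     undefined, provided the folded constant 10 - 5 does not itself
     overflow.  */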
8003  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8004      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8005	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8006	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8007      && (TREE_CODE (arg1) == INTEGER_CST
8008	  && !TREE_OVERFLOW (arg1)))
8009    {
8010      tree const1 = TREE_OPERAND (arg0, 1);
8011      tree const2 = arg1;
8012      tree variable = TREE_OPERAND (arg0, 0);
8013      tree lhs;
8014      int lhs_add;
8015      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8016
8017      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8018			 TREE_TYPE (arg1), const2, const1);
8019      if (TREE_CODE (lhs) == TREE_CODE (arg1)
8020	  && (TREE_CODE (lhs) != INTEGER_CST
8021	      || !TREE_OVERFLOW (lhs)))
8022	{
8023	  fold_overflow_warning (("assuming signed overflow does not occur "
8024				  "when changing X +- C1 cmp C2 to "
8025				  "X cmp C1 +- C2"),
8026				 WARN_STRICT_OVERFLOW_COMPARISON);
8027	  return fold_build2 (code, type, variable, lhs);
8028	}
8029    }
8030
  /* If this is a comparison of two exprs that look like an ARRAY_REF of the
     same object, then we can fold this to a comparison of the two offsets in
     signed size type.  This is possible because pointer arithmetic is
     restricted to remain within an object and overflow on pointer differences
     is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.

     We check flag_wrapv directly because pointer types are unsigned,
     and therefore TYPE_OVERFLOW_WRAPS returns true for them.  That is
     normally what we want, to avoid certain odd overflow cases, but
     not here.  */
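  /* For example, a comparison &a[i] < &a[j] of two references into
     the same array is folded below to a comparison of the two
     offsets in signed size type.  */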
8041  if (POINTER_TYPE_P (TREE_TYPE (arg0))
8042      && !flag_wrapv
8043      && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8044    {
8045      tree base0, offset0, base1, offset1;
8046
8047      if (extract_array_ref (arg0, &base0, &offset0)
8048	  && extract_array_ref (arg1, &base1, &offset1)
8049	  && operand_equal_p (base0, base1, 0))
8050        {
8051	  tree signed_size_type_node;
8052	  signed_size_type_node = signed_type_for (size_type_node);
8053
8054	  /* By converting to signed size type we cover middle-end pointer
8055	     arithmetic which operates on unsigned pointer types of size
8056	     type size and ARRAY_REF offsets which are properly sign or
8057	     zero extended from their type in case it is narrower than
8058	     size type.  */
8059	  if (offset0 == NULL_TREE)
8060	    offset0 = build_int_cst (signed_size_type_node, 0);
8061	  else
8062	    offset0 = fold_convert (signed_size_type_node, offset0);
8063	  if (offset1 == NULL_TREE)
8064	    offset1 = build_int_cst (signed_size_type_node, 0);
8065	  else
8066	    offset1 = fold_convert (signed_size_type_node, offset1);
8067
8068	  return fold_build2 (code, type, offset0, offset1);
8069	}
8070    }

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2 (code, type, fold_convert (newtype, targ0),
			    fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2 (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand (type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst))
	    {
	      tem = fold_inf_compare (code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);
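      /* E.g. with -funsafe-math-optimizations, (10.0 - x) < 4.0
	 becomes x > 6.0 here.  */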

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
  if (TREE_CONSTANT (arg1)
      && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	  || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
      /* This optimization is invalid for ordered comparisons
         if CONST+INCR overflows or if foo+incr might overflow.
	 This optimization is invalid for floating point due to rounding.
	 For pointer types we assume overflow doesn't happen.  */
      && (POINTER_TYPE_P (TREE_TYPE (arg0))
	  || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (code == EQ_EXPR || code == NE_EXPR))))
    {
      tree varop, newconst;

      if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	{
	  newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
				  arg1, TREE_OPERAND (arg0, 1));
	  varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			  TREE_OPERAND (arg0, 0),
			  TREE_OPERAND (arg0, 1));
	}
      else
	{
	  newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
				  arg1, TREE_OPERAND (arg0, 1));
	  varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			  TREE_OPERAND (arg0, 0),
			  TREE_OPERAND (arg0, 1));
	}

      /* If VAROP is a reference to a bitfield, we must mask
	 the constant by the width of the field.  */
      if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
	  && host_integerp (DECL_SIZE (TREE_OPERAND
					 (TREE_OPERAND (varop, 0), 1)), 1))
	{
	  tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
	  tree folded_compare, shift;

	  /* First check whether the comparison would always come out
	     the same.  If we don't do that we would change the meaning
	     with the masking.  */
	  folded_compare = fold_build2 (code, type,
					TREE_OPERAND (varop, 0), arg1);
	  if (TREE_CODE (folded_compare) == INTEGER_CST)
	    return omit_one_operand (type, folded_compare, varop);

	  shift = build_int_cst (NULL_TREE,
				 TYPE_PRECISION (TREE_TYPE (varop)) - size);
	  shift = fold_convert (TREE_TYPE (varop), shift);
	  newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
				  newconst, shift);
	  newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
				  newconst, shift);
	}

      return fold_build2 (code, type, varop, newconst);
    }
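  /* E.g. i++ == 5 is rewritten as ++i == 6, and j-- == 5 (for
     integral j, EQ/NE only) as --j == 4.  */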

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
	  || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2 (EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
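  /* Thus x < x folds to 0 even for IEEE floats (NaN < NaN is false),
     while x == x folds to 1 only when NaNs cannot occur.  */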

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for `=', and the low for `<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand (type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand (type, integer_one_node, arg0);
		}

	      if (save_p)
		return save_expr (build2 (code, type, cval1, cval2));
	      return fold_build2 (code, type, cval1, cval2);
	    }
	}
    }
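  /* For example, ((a > b) - (b > a)) > 0 evaluates to 1/0/0 for the
     three orderings; the mask 4 selects GT_EXPR, so the whole
     expression folds to a > b.  */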

  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y.  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	{
	  tree op0 = TREE_OPERAND (cref0, 0);
	  tree op1 = TREE_OPERAND (cref1, 0);
	  return fold_build2 (code, type,
			      build_fold_addr_expr (op0),
			      build_fold_addr_expr (op1));
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
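  /* E.g. the signed division test x / 4 == 2 becomes the range test
     x >= 8 && x <= 11.  */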

  return NULL_TREE;
}


/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
		      fold_convert (itype, integer_zero_node));
}
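/* For z = a + b*i, the function above produces (a*a + b*b) + 0*i,
   falling back to REALPART_EXPR/IMAGPART_EXPR of a save_expr'd z when
   z is not already a COMPLEX_EXPR or COMPLEX_CST.  */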


/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	tem = const_binop (code, arg0, arg1, 0);
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
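  /* E.g. (a < b) & (c < d) becomes the TRUTH_AND_EXPR
     (a < b) && (c < d), and (a < b) == (c < d) becomes the inversion
     of (a < b) ^ (c < d).  */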

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type,
				    TREE_OPERAND (arg0, 1), op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type,
				    op0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
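	  /* E.g. (x & 4) + (x & 3) is handled as (x & 4) | (x & 3)
	     because the masks share no bits.  */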

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type,
							       parg1)));
	    }
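	  /* E.g. (a*b + c) + d*e is regrouped as (a*b + d*e) + c,
	     exposing the sum of the two products to the factoring
	     code.  */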

	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the
	     step of the array.  The loop optimizer sometimes produces
	     this kind of expression.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
	      if (tem)
		return fold_convert (type, tem);
	    }
	  else if (TREE_CODE (arg1) == ADDR_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
	      if (tem)
		return fold_convert (type, tem);
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

     bit_rotate:
      /* (A << C1) + (A >> C2), with A unsigned and C1+C2 the size of A,
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)), with A unsigned and Z the size of A,
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
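      /* E.g. for unsigned 32-bit x, both (x << 3) + (x >> 29) and
	 (x << b) + (x >> (32 - b)) are recognized as left rotates
	 of x.  */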

    associate:
      /* In most languages, we can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* With undefined overflow we can only associate constants
	     with one variable.  */
	  if ((POINTER_TYPE_P (type)
	       || (INTEGRAL_TYPE_P (type)
		   && !(TYPE_UNSIGNED (type) || flag_wrapv)))
	      && var0 && var1)
	    {
	      tree tmp0 = var0;
	      tree tmp1 = var1;

	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
		tmp0 = TREE_OPERAND (tmp0, 0);
	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
		tmp1 = TREE_OPERAND (tmp1, 0);
	      /* The only case we can still associate with two variables
		 is if they are the same, modulo negation.  */
	      if (!operand_equal_p (tmp0, tmp1, 0))
		ok = false;
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}
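      /* E.g. (x + 1) + (x + 2) reassociates to (x + x) + 3; with
	 undefined signed overflow, two distinct variables are not
	 combined, as checked above.  */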

      return NULL_TREE;

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
			    TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1))
	return fold_build1 (BIT_NOT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1 (BIT_NOT_EXPR, type, arg1);

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold_build2 (BIT_AND_EXPR, type,
				    fold_build1 (BIT_NOT_EXPR, type,
						 TREE_OPERAND (arg1, 0)),
				    arg0);
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold_build2 (BIT_AND_EXPR, type,
				    fold_build1 (BIT_NOT_EXPR, type,
						 TREE_OPERAND (arg1, 1)),
				    arg0);
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2 (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
		}
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
	return fold_build2 (PLUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
	  tree aref0 = TREE_OPERAND (arg0, 0);
	  tree aref1 = TREE_OPERAND (arg1, 0);
	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
			       TREE_OPERAND (aref1, 0), 0))
	    {
	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
	      tree esz = array_ref_element_size (aref0);
	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
	      return fold_build2 (MULT_EXPR, type, diff,
				  fold_convert (type, esz));
	    }
	}
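      /* E.g. &a[i] - &a[j] folds to (i - j) * sizeof (a[0]).  */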

      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the
	 step of the array.  The loop optimizer sometimes produces
	 this kind of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
	  if (tem)
	    return fold_convert (type, tem);
	}

      if (flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, negate_expr (arg0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* Transform x * -1 into -x.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0,
					     fold_convert (type, arg1),
					     code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert (type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold_build2 (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold_build2 (MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);

	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = fold_build2 (PLUS_EXPR, type,
					  TREE_VALUE (TREE_OPERAND (arg0, 1)),
					  TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (expfn, arglist);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, arg,
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
	  int width = TYPE_PRECISION (type);
	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  hi2 = TREE_INT_CST_HIGH (arg1);
	  lo2 = TREE_INT_CST_LOW (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = (unsigned HOST_WIDE_INT) -1
		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
	      mlo = -1;
	    }
	  else
	    {
	      mhi = 0;
	      mlo = (unsigned HOST_WIDE_INT) -1
		    >> (HOST_BITS_PER_WIDE_INT - width);
	    }

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
	    return fold_build2 (BIT_IOR_EXPR, type,
				TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
	  hi1 &= mhi;
	  lo1 &= mlo;
	  if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
	    return fold_build2 (BIT_IOR_EXPR, type,
				fold_build2 (BIT_AND_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     build_int_cst_wide (type,
								 lo1 & ~lo2,
								 hi1 & ~hi2)),
				arg1);
	}
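      /* E.g. (x & 0x0f) | 0x05 becomes (x & 0x0a) | 0x05, and
	 (x & 0x05) | 0x0f becomes just 0x0f since C1's bits are all
	 covered by C2.  */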

      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
9550      /* (X | Y) ^ X -> Y & ~ X*/
9551      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9552          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9553        {
9554	  tree t2 = TREE_OPERAND (arg0, 1);
9555	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9556			    arg1);
9557	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9558			    fold_convert (type, t1));
9559	  return t1;
9560	}
9561
9562      /* (Y | X) ^ X -> Y & ~ X*/
9563      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9564          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9565        {
9566	  tree t2 = TREE_OPERAND (arg0, 0);
9567	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9568			    arg1);
9569	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9570			    fold_convert (type, t1));
9571	  return t1;
9572	}
9573
9574      /* X ^ (X | Y) -> Y & ~ X*/
9575      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9576          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9577        {
9578	  tree t2 = TREE_OPERAND (arg1, 1);
9579	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9580			    arg0);
9581	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9582			    fold_convert (type, t1));
9583	  return t1;
9584	}
9585
      /* X ^ (Y | X) -> Y & ~X.  */
9587      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9588          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9589        {
9590	  tree t2 = TREE_OPERAND (arg1, 0);
9591	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9592			    arg0);
9593	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9594			    fold_convert (type, t1));
9595	  return t1;
9596	}
9597
9598      /* Convert ~X ^ ~Y to X ^ Y.  */
9599      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9600	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9601	return fold_build2 (code, type,
9602			    fold_convert (type, TREE_OPERAND (arg0, 0)),
9603			    fold_convert (type, TREE_OPERAND (arg1, 0)));
9604
9605      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
9606      if (TREE_CODE (arg0) == BIT_AND_EXPR
9607	  && integer_onep (TREE_OPERAND (arg0, 1))
9608	  && integer_onep (arg1))
9609	return fold_build2 (EQ_EXPR, type, arg0,
9610			    build_int_cst (TREE_TYPE (arg0), 0));
9611
9612      /* Fold (X & Y) ^ Y as ~X & Y.  */
9613      if (TREE_CODE (arg0) == BIT_AND_EXPR
9614	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9615	{
9616	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9617	  return fold_build2 (BIT_AND_EXPR, type,
9618			      fold_build1 (BIT_NOT_EXPR, type, tem),
9619			      fold_convert (type, arg1));
9620	}
9621      /* Fold (X & Y) ^ X as ~Y & X.  */
9622      if (TREE_CODE (arg0) == BIT_AND_EXPR
9623	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9624	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9625	{
9626	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9627	  return fold_build2 (BIT_AND_EXPR, type,
9628			      fold_build1 (BIT_NOT_EXPR, type, tem),
9629			      fold_convert (type, arg1));
9630	}
9631      /* Fold X ^ (X & Y) as X & ~Y.  */
9632      if (TREE_CODE (arg1) == BIT_AND_EXPR
9633	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9634	{
9635	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9636	  return fold_build2 (BIT_AND_EXPR, type,
9637			      fold_convert (type, arg0),
9638			      fold_build1 (BIT_NOT_EXPR, type, tem));
9639	}
9640      /* Fold X ^ (Y & X) as ~Y & X.  */
9641      if (TREE_CODE (arg1) == BIT_AND_EXPR
9642	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9643	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9644	{
9645	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9646	  return fold_build2 (BIT_AND_EXPR, type,
9647			      fold_build1 (BIT_NOT_EXPR, type, tem),
9648			      fold_convert (type, arg0));
9649	}
9650
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful, continue with the association code.  */
9653      goto bit_rotate;
9654
9655    case BIT_AND_EXPR:
9656      if (integer_all_onesp (arg1))
9657	return non_lvalue (fold_convert (type, arg0));
9658      if (integer_zerop (arg1))
9659	return omit_one_operand (type, arg1, arg0);
9660      if (operand_equal_p (arg0, arg1, 0))
9661	return non_lvalue (fold_convert (type, arg0));
9662
9663      /* ~X & X is always zero.  */
9664      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9665	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9666	return omit_one_operand (type, integer_zero_node, arg1);
9667
9668      /* X & ~X is always zero.  */
9669      if (TREE_CODE (arg1) == BIT_NOT_EXPR
9670	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9671	return omit_one_operand (type, integer_zero_node, arg0);
9672
9673      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
9674      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9675	  && TREE_CODE (arg1) == INTEGER_CST
9676	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9677	return fold_build2 (BIT_IOR_EXPR, type,
9678			    fold_build2 (BIT_AND_EXPR, type,
9679					 TREE_OPERAND (arg0, 0), arg1),
9680			    fold_build2 (BIT_AND_EXPR, type,
9681					 TREE_OPERAND (arg0, 1), arg1));
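
      /* E.g. with C1 == 0x0c and C2 == 0x0a, (x | 0x0c) & 0x0a becomes
	 (x & 0x0a) | 0x08 above, since 0x0c & 0x0a == 0x08.  */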
9682
9683      /* (X | Y) & Y is (X, Y).  */
9684      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9685	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9686	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9687      /* (X | Y) & X is (Y, X).  */
9688      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9689	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9690	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9691	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9692      /* X & (X | Y) is (Y, X).  */
9693      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9694	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9695	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9696	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9697      /* X & (Y | X) is (Y, X).  */
9698      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9699	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9700	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9701	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9702
9703      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
9704      if (TREE_CODE (arg0) == BIT_XOR_EXPR
9705	  && integer_onep (TREE_OPERAND (arg0, 1))
9706	  && integer_onep (arg1))
9707	{
9708	  tem = TREE_OPERAND (arg0, 0);
9709	  return fold_build2 (EQ_EXPR, type,
9710			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9711					   build_int_cst (TREE_TYPE (tem), 1)),
9712			      build_int_cst (TREE_TYPE (tem), 0));
9713	}
9714      /* Fold ~X & 1 as (X & 1) == 0.  */
9715      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9716	  && integer_onep (arg1))
9717	{
9718	  tem = TREE_OPERAND (arg0, 0);
9719	  return fold_build2 (EQ_EXPR, type,
9720			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9721					   build_int_cst (TREE_TYPE (tem), 1)),
9722			      build_int_cst (TREE_TYPE (tem), 0));
9723	}
9724
9725      /* Fold (X ^ Y) & Y as ~X & Y.  */
9726      if (TREE_CODE (arg0) == BIT_XOR_EXPR
9727	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9728	{
9729	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9730	  return fold_build2 (BIT_AND_EXPR, type,
9731			      fold_build1 (BIT_NOT_EXPR, type, tem),
9732			      fold_convert (type, arg1));
9733	}
9734      /* Fold (X ^ Y) & X as ~Y & X.  */
9735      if (TREE_CODE (arg0) == BIT_XOR_EXPR
9736	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9737	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9738	{
9739	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9740	  return fold_build2 (BIT_AND_EXPR, type,
9741			      fold_build1 (BIT_NOT_EXPR, type, tem),
9742			      fold_convert (type, arg1));
9743	}
9744      /* Fold X & (X ^ Y) as X & ~Y.  */
9745      if (TREE_CODE (arg1) == BIT_XOR_EXPR
9746	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9747	{
9748	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9749	  return fold_build2 (BIT_AND_EXPR, type,
9750			      fold_convert (type, arg0),
9751			      fold_build1 (BIT_NOT_EXPR, type, tem));
9752	}
9753      /* Fold X & (Y ^ X) as ~Y & X.  */
9754      if (TREE_CODE (arg1) == BIT_XOR_EXPR
9755	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9756	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9757	{
9758	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9759	  return fold_build2 (BIT_AND_EXPR, type,
9760			      fold_build1 (BIT_NOT_EXPR, type, tem),
9761			      fold_convert (type, arg0));
9762	}
9763
9764      t1 = distribute_bit_expr (code, type, arg0, arg1);
9765      if (t1 != NULL_TREE)
9766	return t1;
9767      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
9768      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9769	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9770	{
9771	  unsigned int prec
9772	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9773
9774	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9775	      && (~TREE_INT_CST_LOW (arg1)
9776		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9777	    return fold_convert (type, TREE_OPERAND (arg0, 0));
9778	}
9779
9780      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9781
9782	 This results in more efficient code for machines without a NOR
9783	 instruction.  Combine will canonicalize to the first form
9784	 which will allow use of NOR instructions provided by the
9785	 backend if they exist.  */
9786      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9787	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9788	{
9789	  return fold_build1 (BIT_NOT_EXPR, type,
9790			      build2 (BIT_IOR_EXPR, type,
9791				      TREE_OPERAND (arg0, 0),
9792				      TREE_OPERAND (arg1, 0)));
9793	}
9794
9795      goto associate;
9796
9797    case RDIV_EXPR:
9798      /* Don't touch a floating-point divide by zero unless the mode
9799	 of the constant can represent infinity.  */
9800      if (TREE_CODE (arg1) == REAL_CST
9801	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9802	  && real_zerop (arg1))
9803	return NULL_TREE;
9804
9805      /* Optimize A / A to 1.0 if we don't care about
9806	 NaNs or Infinities.  Skip the transformation
9807	 for non-real operands.  */
9808      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9809	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9810	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9811	  && operand_equal_p (arg0, arg1, 0))
9812	{
9813	  tree r = build_real (TREE_TYPE (arg0), dconst1);
9814
9815	  return omit_two_operands (type, r, arg0, arg1);
9816	}
9817
9818      /* The complex version of the above A / A optimization.  */
9819      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9820	  && operand_equal_p (arg0, arg1, 0))
9821	{
9822	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9823	  if (! HONOR_NANS (TYPE_MODE (elem_type))
9824	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9825	    {
9826	      tree r = build_real (elem_type, dconst1);
9827	      /* omit_two_operands will call fold_convert for us.  */
9828	      return omit_two_operands (type, r, arg0, arg1);
9829	    }
9830	}
9831
9832      /* (-A) / (-B) -> A / B  */
9833      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9834	return fold_build2 (RDIV_EXPR, type,
9835			    TREE_OPERAND (arg0, 0),
9836			    negate_expr (arg1));
9837      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9838	return fold_build2 (RDIV_EXPR, type,
9839			    negate_expr (arg0),
9840			    TREE_OPERAND (arg1, 0));
9841
9842      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
9843      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9844	  && real_onep (arg1))
9845	return non_lvalue (fold_convert (type, arg0));
9846
9847      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
9848      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9849	  && real_minus_onep (arg1))
9850	return non_lvalue (fold_convert (type, negate_expr (arg0)));
9851
9852      /* If ARG1 is a constant, we can convert this to a multiply by the
9853	 reciprocal.  This does not have the same rounding properties,
9854	 so only do this if -funsafe-math-optimizations.  We can actually
9855	 always safely do it if ARG1 is a power of two, but it's hard to
9856	 tell if it is or not in a portable manner.  */
9857      if (TREE_CODE (arg1) == REAL_CST)
9858	{
9859	  if (flag_unsafe_math_optimizations
9860	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
9861					  arg1, 0)))
9862	    return fold_build2 (MULT_EXPR, type, arg0, tem);
9863	  /* Find the reciprocal if optimizing and the result is exact.  */
9864	  if (optimize)
9865	    {
9866	      REAL_VALUE_TYPE r;
9867	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9869		{
9870		  tem = build_real (type, r);
9871		  return fold_build2 (MULT_EXPR, type,
9872				      fold_convert (type, arg0), tem);
9873		}
9874	    }
9875	}
9876      /* Convert A/B/C to A/(B*C).  */
9877      if (flag_unsafe_math_optimizations
9878	  && TREE_CODE (arg0) == RDIV_EXPR)
9879	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9880			    fold_build2 (MULT_EXPR, type,
9881					 TREE_OPERAND (arg0, 1), arg1));
9882
9883      /* Convert A/(B/C) to (A/B)*C.  */
9884      if (flag_unsafe_math_optimizations
9885	  && TREE_CODE (arg1) == RDIV_EXPR)
9886	return fold_build2 (MULT_EXPR, type,
9887			    fold_build2 (RDIV_EXPR, type, arg0,
9888					 TREE_OPERAND (arg1, 0)),
9889			    TREE_OPERAND (arg1, 1));
9890
9891      /* Convert C1/(X*C2) into (C1/C2)/X.  */
9892      if (flag_unsafe_math_optimizations
9893	  && TREE_CODE (arg1) == MULT_EXPR
9894	  && TREE_CODE (arg0) == REAL_CST
9895	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9896	{
9897	  tree tem = const_binop (RDIV_EXPR, arg0,
9898				  TREE_OPERAND (arg1, 1), 0);
9899	  if (tem)
9900	    return fold_build2 (RDIV_EXPR, type, tem,
9901				TREE_OPERAND (arg1, 0));
9902	}
9903
9904      if (flag_unsafe_math_optimizations)
9905	{
9906	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9907	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9908
9909	  /* Optimize sin(x)/cos(x) as tan(x).  */
9910	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9911	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9912	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9913	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9914				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9915	    {
9916	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9917
9918	      if (tanfn != NULL_TREE)
9919		return build_function_call_expr (tanfn,
9920						 TREE_OPERAND (arg0, 1));
9921	    }
9922
9923	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
9924	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9925	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9926	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9927	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9928				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9929	    {
9930	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9931
9932	      if (tanfn != NULL_TREE)
9933		{
9934		  tree tmp = TREE_OPERAND (arg0, 1);
9935		  tmp = build_function_call_expr (tanfn, tmp);
9936		  return fold_build2 (RDIV_EXPR, type,
9937				      build_real (type, dconst1), tmp);
9938		}
9939	    }
9940
9941 	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9942	     NaNs or Infinities.  */
9943 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9944 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9945 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9946	    {
9947	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9948	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9949
9950	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9951		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9952		  && operand_equal_p (arg00, arg01, 0))
9953		{
9954		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9955
9956		  if (cosfn != NULL_TREE)
9957		    return build_function_call_expr (cosfn,
9958						     TREE_OPERAND (arg0, 1));
9959		}
9960	    }
9961
9962 	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9963	     NaNs or Infinities.  */
9964 	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9965 	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9966 	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9967	    {
9968	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9969	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9970
9971	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9972		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9973		  && operand_equal_p (arg00, arg01, 0))
9974		{
9975		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9976
9977		  if (cosfn != NULL_TREE)
9978		    {
9979		      tree tmp = TREE_OPERAND (arg0, 1);
9980		      tmp = build_function_call_expr (cosfn, tmp);
9981		      return fold_build2 (RDIV_EXPR, type,
9982					  build_real (type, dconst1),
9983					  tmp);
9984		    }
9985		}
9986	    }
9987
9988	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
9989	  if (fcode0 == BUILT_IN_POW
9990	      || fcode0 == BUILT_IN_POWF
9991	      || fcode0 == BUILT_IN_POWL)
9992	    {
9993	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9994	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9995	      if (TREE_CODE (arg01) == REAL_CST
9996		  && ! TREE_CONSTANT_OVERFLOW (arg01)
9997		  && operand_equal_p (arg1, arg00, 0))
9998		{
9999		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10000		  REAL_VALUE_TYPE c;
10001		  tree arg, arglist;
10002
10003		  c = TREE_REAL_CST (arg01);
10004		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10005		  arg = build_real (type, c);
10006		  arglist = build_tree_list (NULL_TREE, arg);
10007		  arglist = tree_cons (NULL_TREE, arg1, arglist);
10008		  return build_function_call_expr (powfn, arglist);
10009		}
10010	    }
10011
10012	  /* Optimize x/expN(y) into x*expN(-y).  */
10013	  if (BUILTIN_EXPONENT_P (fcode1))
10014	    {
10015	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10016	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10017	      tree arglist = build_tree_list (NULL_TREE,
10018					      fold_convert (type, arg));
10019	      arg1 = build_function_call_expr (expfn, arglist);
10020	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
10021	    }
10022
10023	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
10024	  if (fcode1 == BUILT_IN_POW
10025	      || fcode1 == BUILT_IN_POWF
10026	      || fcode1 == BUILT_IN_POWL)
10027	    {
10028	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10029	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10030	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10031	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
10034	      arg1 = build_function_call_expr (powfn, arglist);
10035	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
10036	    }
10037	}
10038      return NULL_TREE;
10039
10040    case TRUNC_DIV_EXPR:
10041    case FLOOR_DIV_EXPR:
10042      /* Simplify A / (B << N) where A and B are positive and B is
10043	 a power of 2, to A >> (N + log2(B)).  */
10044      strict_overflow_p = false;
10045      if (TREE_CODE (arg1) == LSHIFT_EXPR
10046	  && (TYPE_UNSIGNED (type)
10047	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10048	{
10049	  tree sval = TREE_OPERAND (arg1, 0);
10050	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10051	    {
10052	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10053	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10054
10055	      if (strict_overflow_p)
10056		fold_overflow_warning (("assuming signed overflow does not "
10057					"occur when simplifying A / (B << N)"),
10058				       WARN_STRICT_OVERFLOW_MISC);
10059
10060	      sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10061				    sh_cnt, build_int_cst (NULL_TREE, pow2));
10062	      return fold_build2 (RSHIFT_EXPR, type,
10063				  fold_convert (type, arg0), sh_cnt);
10064	    }
10065	}
      /* Fall through.  */
10067
10068    case ROUND_DIV_EXPR:
10069    case CEIL_DIV_EXPR:
10070    case EXACT_DIV_EXPR:
10071      if (integer_onep (arg1))
10072	return non_lvalue (fold_convert (type, arg0));
10073      if (integer_zerop (arg1))
10074	return NULL_TREE;
10075      /* X / -1 is -X.  */
10076      if (!TYPE_UNSIGNED (type)
10077	  && TREE_CODE (arg1) == INTEGER_CST
10078	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10079	  && TREE_INT_CST_HIGH (arg1) == -1)
10080	return fold_convert (type, negate_expr (arg0));
10081
10082      /* Convert -A / -B to A / B when the type is signed and overflow is
10083	 undefined.  */
10084      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10085	  && TREE_CODE (arg0) == NEGATE_EXPR
10086	  && negate_expr_p (arg1))
10087	{
10088	  if (INTEGRAL_TYPE_P (type))
10089	    fold_overflow_warning (("assuming signed overflow does not occur "
10090				    "when distributing negation across "
10091				    "division"),
10092				   WARN_STRICT_OVERFLOW_MISC);
10093	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10094			      negate_expr (arg1));
10095	}
10096      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10097	  && TREE_CODE (arg1) == NEGATE_EXPR
10098	  && negate_expr_p (arg0))
10099	{
10100	  if (INTEGRAL_TYPE_P (type))
10101	    fold_overflow_warning (("assuming signed overflow does not occur "
10102				    "when distributing negation across "
10103				    "division"),
10104				   WARN_STRICT_OVERFLOW_MISC);
10105	  return fold_build2 (code, type, negate_expr (arg0),
10106			      TREE_OPERAND (arg1, 0));
10107	}
10108
10109      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10110	 operation, EXACT_DIV_EXPR.
10111
	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, but it's not clear
	 whether they still do after the last round of changes to the DIV
	 code in expmed.c.  */
10115      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10116	  && multiple_of_p (type, arg0, arg1))
10117	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10118
10119      strict_overflow_p = false;
10120      if (TREE_CODE (arg1) == INTEGER_CST
10121	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10122					 &strict_overflow_p)))
10123	{
10124	  if (strict_overflow_p)
10125	    fold_overflow_warning (("assuming signed overflow does not occur "
10126				    "when simplifying division"),
10127				   WARN_STRICT_OVERFLOW_MISC);
10128	  return fold_convert (type, tem);
10129	}
10130
10131      return NULL_TREE;
10132
10133    case CEIL_MOD_EXPR:
10134    case FLOOR_MOD_EXPR:
10135    case ROUND_MOD_EXPR:
10136    case TRUNC_MOD_EXPR:
10137      /* X % 1 is always zero, but be sure to preserve any side
10138	 effects in X.  */
10139      if (integer_onep (arg1))
10140	return omit_one_operand (type, integer_zero_node, arg0);
10141
      /* For X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
10144      if (integer_zerop (arg1))
10145	return NULL_TREE;
10146
10147      /* 0 % X is always zero, but be sure to preserve any side
10148	 effects in X.  Place this after checking for X == 0.  */
10149      if (integer_zerop (arg0))
10150	return omit_one_operand (type, integer_zero_node, arg1);
10151
10152      /* X % -1 is zero.  */
10153      if (!TYPE_UNSIGNED (type)
10154	  && TREE_CODE (arg1) == INTEGER_CST
10155	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10156	  && TREE_INT_CST_HIGH (arg1) == -1)
10157	return omit_one_operand (type, integer_zero_node, arg0);
10158
10159      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10160         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
10161      strict_overflow_p = false;
10162      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10163	  && (TYPE_UNSIGNED (type)
10164	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10165	{
10166	  tree c = arg1;
10167	  /* Also optimize A % (C << N)  where C is a power of 2,
10168	     to A & ((C << N) - 1).  */
10169	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
10170	    c = TREE_OPERAND (arg1, 0);
10171
10172	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10173	    {
10174	      tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10175				       arg1, integer_one_node);
10176	      if (strict_overflow_p)
10177		fold_overflow_warning (("assuming signed overflow does not "
10178					"occur when simplifying "
10179					"X % (power of two)"),
10180				       WARN_STRICT_OVERFLOW_MISC);
10181	      return fold_build2 (BIT_AND_EXPR, type,
10182				  fold_convert (type, arg0),
10183				  fold_convert (type, mask));
10184	    }
10185	}
10186
10187      /* X % -C is the same as X % C.  */
10188      if (code == TRUNC_MOD_EXPR
10189	  && !TYPE_UNSIGNED (type)
10190	  && TREE_CODE (arg1) == INTEGER_CST
10191	  && !TREE_CONSTANT_OVERFLOW (arg1)
10192	  && TREE_INT_CST_HIGH (arg1) < 0
10193	  && !TYPE_OVERFLOW_TRAPS (type)
10194	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
10195	  && !sign_bit_p (arg1, arg1))
10196	return fold_build2 (code, type, fold_convert (type, arg0),
10197			    fold_convert (type, negate_expr (arg1)));
10198
10199      /* X % -Y is the same as X % Y.  */
10200      if (code == TRUNC_MOD_EXPR
10201	  && !TYPE_UNSIGNED (type)
10202	  && TREE_CODE (arg1) == NEGATE_EXPR
10203	  && !TYPE_OVERFLOW_TRAPS (type))
10204	return fold_build2 (code, type, fold_convert (type, arg0),
10205			    fold_convert (type, TREE_OPERAND (arg1, 0)));
10206
10207      if (TREE_CODE (arg1) == INTEGER_CST
10208	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10209					 &strict_overflow_p)))
10210	{
10211	  if (strict_overflow_p)
10212	    fold_overflow_warning (("assuming signed overflow does not occur "
10213				    "when simplifying modulos"),
10214				   WARN_STRICT_OVERFLOW_MISC);
10215	  return fold_convert (type, tem);
10216	}
10217
10218      return NULL_TREE;
10219
10220    case LROTATE_EXPR:
10221    case RROTATE_EXPR:
10222      if (integer_all_onesp (arg0))
10223	return omit_one_operand (type, arg0, arg1);
10224      goto shift;
10225
10226    case RSHIFT_EXPR:
10227      /* Optimize -1 >> x for arithmetic right shifts.  */
10228      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10229	return omit_one_operand (type, arg0, arg1);
10230      /* ... fall through ...  */
10231
10232    case LSHIFT_EXPR:
10233    shift:
10234      if (integer_zerop (arg1))
10235	return non_lvalue (fold_convert (type, arg0));
10236      if (integer_zerop (arg0))
10237	return omit_one_operand (type, arg0, arg1);
10238
10239      /* Since negative shift count is not well-defined,
10240	 don't try to compute it in the compiler.  */
10241      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10242	return NULL_TREE;
10243
10244      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
10245      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10246	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10247	  && host_integerp (TREE_OPERAND (arg0, 1), false)
10248	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10249	{
10250	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10251			       + TREE_INT_CST_LOW (arg1));
10252
10253	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10254	     being well defined.  */
10255	  if (low >= TYPE_PRECISION (type))
10256	    {
10257	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10258	        low = low % TYPE_PRECISION (type);
10259	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10260	        return build_int_cst (type, 0);
10261	      else
10262		low = TYPE_PRECISION (type) - 1;
10263	    }
10264
10265	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10266			      build_int_cst (type, low));
10267	}
10268
10269      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10270         into x & ((unsigned)-1 >> c) for unsigned types.  */
10271      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10272           || (TYPE_UNSIGNED (type)
10273	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10274	  && host_integerp (arg1, false)
10275	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10276	  && host_integerp (TREE_OPERAND (arg0, 1), false)
10277	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10278	{
10279	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10280	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10281	  tree lshift;
10282	  tree arg00;
10283
10284	  if (low0 == low1)
10285	    {
10286	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10287
10288	      lshift = build_int_cst (type, -1);
10289	      lshift = int_const_binop (code, lshift, arg1, 0);
10290
10291	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10292	    }
10293	}
10294
10295      /* Rewrite an LROTATE_EXPR by a constant into an
10296	 RROTATE_EXPR by a new constant.  */
10297      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10298	{
10299	  tree tem = build_int_cst (NULL_TREE,
10300				    GET_MODE_BITSIZE (TYPE_MODE (type)));
10301	  tem = fold_convert (TREE_TYPE (arg1), tem);
10302	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10303	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10304	}
10305
10306      /* If we have a rotate of a bit operation with the rotate count and
10307	 the second operand of the bit operation both constant,
10308	 permute the two operations.  */
10309      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10310	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10311	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10312	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10313	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10314	return fold_build2 (TREE_CODE (arg0), type,
10315			    fold_build2 (code, type,
10316					 TREE_OPERAND (arg0, 0), arg1),
10317			    fold_build2 (code, type,
10318					 TREE_OPERAND (arg0, 1), arg1));
10319
10320      /* Two consecutive rotates adding up to the width of the mode can
10321	 be ignored.  */
10322      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10323	  && TREE_CODE (arg0) == RROTATE_EXPR
10324	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10325	  && TREE_INT_CST_HIGH (arg1) == 0
10326	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10327	  && ((TREE_INT_CST_LOW (arg1)
10328	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10329	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10330	return TREE_OPERAND (arg0, 0);
10331
10332      return NULL_TREE;
10333
10334    case MIN_EXPR:
10335      if (operand_equal_p (arg0, arg1, 0))
10336	return omit_one_operand (type, arg0, arg1);
10337      if (INTEGRAL_TYPE_P (type)
10338	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10339	return omit_one_operand (type, arg1, arg0);
10340      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10341      if (tem)
10342	return tem;
10343      goto associate;
10344
10345    case MAX_EXPR:
10346      if (operand_equal_p (arg0, arg1, 0))
10347	return omit_one_operand (type, arg0, arg1);
10348      if (INTEGRAL_TYPE_P (type)
10349	  && TYPE_MAX_VALUE (type)
10350	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10351	return omit_one_operand (type, arg1, arg0);
10352      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10353      if (tem)
10354	return tem;
10355      goto associate;
10356
10357    case TRUTH_ANDIF_EXPR:
10358      /* Note that the operands of this must be ints
10359	 and their values must be 0 or 1.
10360	 ("true" is a fixed value perhaps depending on the language.)  */
10361      /* If first arg is constant zero, return it.  */
10362      if (integer_zerop (arg0))
10363	return fold_convert (type, arg0);
10364    case TRUTH_AND_EXPR:
10365      /* If either arg is constant true, drop it.  */
10366      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10367	return non_lvalue (fold_convert (type, arg1));
10368      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10369	  /* Preserve sequence points.  */
10370	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10371	return non_lvalue (fold_convert (type, arg0));
10372      /* If second arg is constant zero, result is zero, but first arg
10373	 must be evaluated.  */
10374      if (integer_zerop (arg1))
10375	return omit_one_operand (type, arg1, arg0);
10376      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10377	 case will be handled here.  */
10378      if (integer_zerop (arg0))
10379	return omit_one_operand (type, arg0, arg1);
10380
10381      /* !X && X is always false.  */
10382      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10383	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10384	return omit_one_operand (type, integer_zero_node, arg1);
10385      /* X && !X is always false.  */
10386      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10387	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10388	return omit_one_operand (type, integer_zero_node, arg0);
10389
10390      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10391	 means A >= Y && A != MAX, but in this case we know that
10392	 A < X <= MAX.  */
10393
10394      if (!TREE_SIDE_EFFECTS (arg0)
10395	  && !TREE_SIDE_EFFECTS (arg1))
10396	{
10397	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10398	  if (tem && !operand_equal_p (tem, arg0, 0))
10399	    return fold_build2 (code, type, tem, arg1);
10400
10401	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10402	  if (tem && !operand_equal_p (tem, arg1, 0))
10403	    return fold_build2 (code, type, arg0, tem);
10404	}
10405
10406    truth_andor:
10407      /* We only do these simplifications if we are optimizing.  */
10408      if (!optimize)
10409	return NULL_TREE;
10410
10411      /* Check for things like (A || B) && (A || C).  We can convert this
10412	 to A || (B && C).  Note that either operator can be any of the four
10413	 truth and/or operations and the transformation will still be
10414	 valid.   Also note that we only care about order for the
10415	 ANDIF and ORIF operators.  If B contains side effects, this
10416	 might change the truth-value of A.  */
10417      if (TREE_CODE (arg0) == TREE_CODE (arg1)
10418	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10419	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10420	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
10421	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10422	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10423	{
10424	  tree a00 = TREE_OPERAND (arg0, 0);
10425	  tree a01 = TREE_OPERAND (arg0, 1);
10426	  tree a10 = TREE_OPERAND (arg1, 0);
10427	  tree a11 = TREE_OPERAND (arg1, 1);
10428	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10429			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10430			     && (code == TRUTH_AND_EXPR
10431				 || code == TRUTH_OR_EXPR));
10432
10433	  if (operand_equal_p (a00, a10, 0))
10434	    return fold_build2 (TREE_CODE (arg0), type, a00,
10435				fold_build2 (code, type, a01, a11));
10436	  else if (commutative && operand_equal_p (a00, a11, 0))
10437	    return fold_build2 (TREE_CODE (arg0), type, a00,
10438				fold_build2 (code, type, a01, a10));
10439	  else if (commutative && operand_equal_p (a01, a10, 0))
10440	    return fold_build2 (TREE_CODE (arg0), type, a01,
10441				fold_build2 (code, type, a00, a11));
10442
	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */
10445
10446	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10447		   && operand_equal_p (a01, a11, 0))
10448	    return fold_build2 (TREE_CODE (arg0), type,
10449				fold_build2 (code, type, a00, a10),
10450				a01);
10451	}
10452
10453      /* See if we can build a range comparison.  */
10454      if (0 != (tem = fold_range_test (code, type, op0, op1)))
10455	return tem;
10456
10457      /* Check for the possibility of merging component references.  If our
10458	 lhs is another similar operation, try to merge its rhs with our
10459	 rhs.  Then try to merge our lhs and rhs.  */
10460      if (TREE_CODE (arg0) == code
10461	  && 0 != (tem = fold_truthop (code, type,
10462				       TREE_OPERAND (arg0, 1), arg1)))
10463	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10464
10465      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10466	return tem;
10467
10468      return NULL_TREE;
10469
10470    case TRUTH_ORIF_EXPR:
10471      /* Note that the operands of this must be ints
10472	 and their values must be 0 or true.
10473	 ("true" is a fixed value perhaps depending on the language.)  */
10474      /* If first arg is constant true, return it.  */
10475      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10476	return fold_convert (type, arg0);
10477    case TRUTH_OR_EXPR:
10478      /* If either arg is constant zero, drop it.  */
10479      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10480	return non_lvalue (fold_convert (type, arg1));
10481      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10482	  /* Preserve sequence points.  */
10483	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10484	return non_lvalue (fold_convert (type, arg0));
10485      /* If second arg is constant true, result is true, but we must
10486	 evaluate first arg.  */
10487      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10488	return omit_one_operand (type, arg1, arg0);
10489      /* Likewise for first arg, but note this only occurs here for
10490	 TRUTH_OR_EXPR.  */
10491      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10492	return omit_one_operand (type, arg0, arg1);
10493
10494      /* !X || X is always true.  */
10495      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10496	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10497	return omit_one_operand (type, integer_one_node, arg1);
10498      /* X || !X is always true.  */
10499      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10500	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10501	return omit_one_operand (type, integer_one_node, arg0);
10502
10503      goto truth_andor;
10504
10505    case TRUTH_XOR_EXPR:
10506      /* If the second arg is constant zero, drop it.  */
10507      if (integer_zerop (arg1))
10508	return non_lvalue (fold_convert (type, arg0));
10509      /* If the second arg is constant true, this is a logical inversion.  */
10510      if (integer_onep (arg1))
10511	{
10512	  /* Only call invert_truthvalue if operand is a truth value.  */
10513	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10514	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10515	  else
10516	    tem = invert_truthvalue (arg0);
10517	  return non_lvalue (fold_convert (type, tem));
10518	}
10519      /* Identical arguments cancel to zero.  */
10520      if (operand_equal_p (arg0, arg1, 0))
10521	return omit_one_operand (type, integer_zero_node, arg0);
10522
10523      /* !X ^ X is always true.  */
10524      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10525	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10526	return omit_one_operand (type, integer_one_node, arg1);
10527
10528      /* X ^ !X is always true.  */
10529      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10530	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10531	return omit_one_operand (type, integer_one_node, arg0);
10532
10533      return NULL_TREE;
10534
10535    case EQ_EXPR:
10536    case NE_EXPR:
10537      tem = fold_comparison (code, type, op0, op1);
10538      if (tem != NULL_TREE)
10539	return tem;
10540
10541      /* bool_var != 0 becomes bool_var. */
10542      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10543          && code == NE_EXPR)
10544        return non_lvalue (fold_convert (type, arg0));
10545
10546      /* bool_var == 1 becomes bool_var. */
10547      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10548          && code == EQ_EXPR)
10549        return non_lvalue (fold_convert (type, arg0));
10550
10551      /* bool_var != 1 becomes !bool_var. */
10552      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10553          && code == NE_EXPR)
10554        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10555
10556      /* bool_var == 0 becomes !bool_var. */
10557      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10558          && code == EQ_EXPR)
10559        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10560
10561      /*  ~a != C becomes a != ~C where C is a constant.  Likewise for ==.  */
10562      if (TREE_CODE (arg0) == BIT_NOT_EXPR
10563	  && TREE_CODE (arg1) == INTEGER_CST)
10564	{
10565	  tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10566	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10567			      fold_build1 (BIT_NOT_EXPR, cmp_type,
10568					   fold_convert (cmp_type, arg1)));
10569	}
10570
10571      /* If this is an equality comparison of the address of a non-weak
10572	 object against zero, then we know the result.  */
10573      if (TREE_CODE (arg0) == ADDR_EXPR
10574	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10575	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10576	  && integer_zerop (arg1))
10577	return constant_boolean_node (code != EQ_EXPR, type);
10578
10579      /* If this is an equality comparison of the address of two non-weak,
10580	 unaliased symbols neither of which are extern (since we do not
10581	 have access to attributes for externs), then we know the result.  */
10582      if (TREE_CODE (arg0) == ADDR_EXPR
10583	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10584	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10585	  && ! lookup_attribute ("alias",
10586				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10587	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10588	  && TREE_CODE (arg1) == ADDR_EXPR
10589	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10590	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10591	  && ! lookup_attribute ("alias",
10592				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10593	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10594	{
10595	  /* We know that we're looking at the address of two
10596	     non-weak, unaliased, static _DECL nodes.
10597
10598	     It is both wasteful and incorrect to call operand_equal_p
10599	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
10600	     all we need to do is test pointer equality for the arguments
10601	     to the two ADDR_EXPR nodes.  It is incorrect to use
10602	     operand_equal_p as that function is NOT equivalent to a
10603	     C equality test.  It can in fact return false for two
10604	     objects which would test as equal using the C equality
10605	     operator.  */
10606	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10607	  return constant_boolean_node (equal
10608				        ? code == EQ_EXPR : code != EQ_EXPR,
10609				        type);
10610	}
10611
10612      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10613	 a MINUS_EXPR of a constant, we can convert it into a comparison with
10614	 a revised constant as long as no overflow occurs.  */
10615      if (TREE_CODE (arg1) == INTEGER_CST
10616	  && (TREE_CODE (arg0) == PLUS_EXPR
10617	      || TREE_CODE (arg0) == MINUS_EXPR)
10618	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10619	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10620				      ? MINUS_EXPR : PLUS_EXPR,
10621				      fold_convert (TREE_TYPE (arg0), arg1),
10622				      TREE_OPERAND (arg0, 1), 0))
10623	  && ! TREE_CONSTANT_OVERFLOW (tem))
10624	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10625
10626      /* Similarly for a NEGATE_EXPR.  */
10627      if (TREE_CODE (arg0) == NEGATE_EXPR
10628	  && TREE_CODE (arg1) == INTEGER_CST
10629	  && 0 != (tem = negate_expr (arg1))
10630	  && TREE_CODE (tem) == INTEGER_CST
10631	  && ! TREE_CONSTANT_OVERFLOW (tem))
10632	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10633
10634      /* If we have X - Y == 0, we can convert that to X == Y and similarly
10635	 for !=.  Don't do this for ordered comparisons due to overflow.  */
10636      if (TREE_CODE (arg0) == MINUS_EXPR
10637	  && integer_zerop (arg1))
10638	return fold_build2 (code, type,
10639			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10640
10641      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
10642      if (TREE_CODE (arg0) == ABS_EXPR
10643	  && (integer_zerop (arg1) || real_zerop (arg1)))
10644	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10645
10646      /* If this is an EQ or NE comparison with zero and ARG0 is
10647	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10648	 two operations, but the latter can be done in one less insn
10649	 on machines that have only two-operand insns or on which a
10650	 constant cannot be the first operand.  */
10651      if (TREE_CODE (arg0) == BIT_AND_EXPR
10652	  && integer_zerop (arg1))
10653	{
10654	  tree arg00 = TREE_OPERAND (arg0, 0);
10655	  tree arg01 = TREE_OPERAND (arg0, 1);
10656	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10657	      && integer_onep (TREE_OPERAND (arg00, 0)))
10658	    {
10659	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10660				      arg01, TREE_OPERAND (arg00, 1));
10661	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10662				 build_int_cst (TREE_TYPE (arg0), 1));
10663	      return fold_build2 (code, type,
10664				  fold_convert (TREE_TYPE (arg1), tem), arg1);
10665	    }
10666	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10667		   && integer_onep (TREE_OPERAND (arg01, 0)))
10668	    {
10669	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10670				      arg00, TREE_OPERAND (arg01, 1));
10671	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10672				 build_int_cst (TREE_TYPE (arg0), 1));
10673	      return fold_build2 (code, type,
10674				  fold_convert (TREE_TYPE (arg1), tem), arg1);
10675	    }
10676	}
10677
10678      /* If this is an NE or EQ comparison of zero against the result of a
10679	 signed MOD operation whose second operand is a power of 2, make
10680	 the MOD operation unsigned since it is simpler and equivalent.  */
10681      if (integer_zerop (arg1)
10682	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10683	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10684	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10685	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10686	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10687	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10688	{
10689	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10690	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10691				     fold_convert (newtype,
10692						   TREE_OPERAND (arg0, 0)),
10693				     fold_convert (newtype,
10694						   TREE_OPERAND (arg0, 1)));
10695
10696	  return fold_build2 (code, type, newmod,
10697			      fold_convert (newtype, arg1));
10698	}
10699
10700      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10701	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10702	 a single bit.  */
10703      if (TREE_CODE (arg0) == BIT_AND_EXPR
10704	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10705	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10706	     == INTEGER_CST
10707	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10708	  && integer_zerop (arg1))
10709	{
10710	  tree itype = TREE_TYPE (arg0);
10711	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10712	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10713
10714	  /* Check for a valid shift count.  */
10715	  if (TREE_INT_CST_HIGH (arg001) == 0
10716	      && TREE_INT_CST_LOW (arg001) < prec)
10717	    {
10718	      tree arg01 = TREE_OPERAND (arg0, 1);
10719	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10720	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10721	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10722		 can be rewritten as (X & (C2 << C1)) != 0.  */
10723	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10724		{
10725		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10726		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10727		  return fold_build2 (code, type, tem, arg1);
10728		}
10729	      /* Otherwise, for signed (arithmetic) shifts,
10730		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10731		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10732	      else if (!TYPE_UNSIGNED (itype))
10733		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10734				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
10736		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10737		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10738	      else
10739		return omit_one_operand (type,
10740					 code == EQ_EXPR ? integer_one_node
10741							 : integer_zero_node,
10742					 arg000);
10743	    }
10744	}
10745
10746      /* If this is an NE comparison of zero with an AND of one, remove the
10747	 comparison since the AND will give the correct value.  */
10748      if (code == NE_EXPR
10749	  && integer_zerop (arg1)
10750	  && TREE_CODE (arg0) == BIT_AND_EXPR
10751	  && integer_onep (TREE_OPERAND (arg0, 1)))
10752	return fold_convert (type, arg0);
10753
10754      /* If we have (A & C) == C where C is a power of 2, convert this into
10755	 (A & C) != 0.  Similarly for NE_EXPR.  */
10756      if (TREE_CODE (arg0) == BIT_AND_EXPR
10757	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10758	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10759	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10760			    arg0, fold_convert (TREE_TYPE (arg0),
10761						integer_zero_node));
10762
10763      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10764	 bit, then fold the expression into A < 0 or A >= 0.  */
10765      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10766      if (tem)
10767	return tem;
10768
10769      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10770	 Similarly for NE_EXPR.  */
10771      if (TREE_CODE (arg0) == BIT_AND_EXPR
10772	  && TREE_CODE (arg1) == INTEGER_CST
10773	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10774	{
10775	  tree notc = fold_build1 (BIT_NOT_EXPR,
10776				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
10777				   TREE_OPERAND (arg0, 1));
10778	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10779				       arg1, notc);
10780	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10781	  if (integer_nonzerop (dandnotc))
10782	    return omit_one_operand (type, rslt, arg0);
10783	}
10784
10785      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10786	 Similarly for NE_EXPR.  */
10787      if (TREE_CODE (arg0) == BIT_IOR_EXPR
10788	  && TREE_CODE (arg1) == INTEGER_CST
10789	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10790	{
10791	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10792	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10793				       TREE_OPERAND (arg0, 1), notd);
10794	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10795	  if (integer_nonzerop (candnotd))
10796	    return omit_one_operand (type, rslt, arg0);
10797	}
10798
10799      /* If this is a comparison of a field, we may be able to simplify it.  */
10800      if (((TREE_CODE (arg0) == COMPONENT_REF
10801	    && lang_hooks.can_use_bit_fields_p ())
10802	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10803	  /* Handle the constant case even without -O
10804	     to make sure the warnings are given.  */
10805	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10806	{
10807	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10808	  if (t1)
10809	    return t1;
10810	}
10811
10812      /* Optimize comparisons of strlen vs zero to a compare of the
10813	 first character of the string vs zero.  To wit,
10814		strlen(ptr) == 0   =>  *ptr == 0
10815		strlen(ptr) != 0   =>  *ptr != 0
10816	 Other cases should reduce to one of these two (or a constant)
10817	 due to the return value of strlen being unsigned.  */
10818      if (TREE_CODE (arg0) == CALL_EXPR
10819	  && integer_zerop (arg1))
10820	{
10821	  tree fndecl = get_callee_fndecl (arg0);
10822	  tree arglist;
10823
10824	  if (fndecl
10825	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10826	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10827	      && (arglist = TREE_OPERAND (arg0, 1))
10828	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10829	      && ! TREE_CHAIN (arglist))
10830	    {
10831	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10832	      return fold_build2 (code, type, iref,
10833				  build_int_cst (TREE_TYPE (iref), 0));
10834	    }
10835	}
10836
10837      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10838	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10839      if (TREE_CODE (arg0) == RSHIFT_EXPR
10840	  && integer_zerop (arg1)
10841	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10842	{
10843	  tree arg00 = TREE_OPERAND (arg0, 0);
10844	  tree arg01 = TREE_OPERAND (arg0, 1);
10845	  tree itype = TREE_TYPE (arg00);
10846	  if (TREE_INT_CST_HIGH (arg01) == 0
10847	      && TREE_INT_CST_LOW (arg01)
10848		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10849	    {
10850	      if (TYPE_UNSIGNED (itype))
10851		{
10852		  itype = lang_hooks.types.signed_type (itype);
10853		  arg00 = fold_convert (itype, arg00);
10854		}
10855	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10856				  type, arg00, build_int_cst (itype, 0));
10857	    }
10858	}
10859
10860      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
10861      if (integer_zerop (arg1)
10862	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
10863	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10864			    TREE_OPERAND (arg0, 1));
10865
10866      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
10867      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10868	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10869	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10870			    build_int_cst (TREE_TYPE (arg1), 0));
10871      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
10872      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10873	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10874	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10875	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10876			    build_int_cst (TREE_TYPE (arg1), 0));
10877
10878      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
10879      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10880	  && TREE_CODE (arg1) == INTEGER_CST
10881	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10882	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10883			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10884					 TREE_OPERAND (arg0, 1), arg1));
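
      /* E.g. (x ^ 3) == 5 becomes x == 6 here, since 3 ^ 5 == 6.  */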
10885
10886      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10887	 (X & C) == 0 when C is a single bit.  */
10888      if (TREE_CODE (arg0) == BIT_AND_EXPR
10889	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10890	  && integer_zerop (arg1)
10891	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10892	{
10893	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10894			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10895			     TREE_OPERAND (arg0, 1));
10896	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10897			      type, tem, arg1);
10898	}
10899
10900      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10901	 constant C is a power of two, i.e. a single bit.  */
10902      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10903	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10904	  && integer_zerop (arg1)
10905	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10906	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10907			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10908	{
10909	  tree arg00 = TREE_OPERAND (arg0, 0);
10910	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10911			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10912	}
10913
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
10916      if (TREE_CODE (arg0) == BIT_AND_EXPR
10917	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10918	  && integer_zerop (arg1)
10919	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10920	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10921			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10922	{
10923	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10924	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10925			     arg000, TREE_OPERAND (arg0, 1));
10926	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10927			      tem, build_int_cst (TREE_TYPE (tem), 0));
10928	}
10929
10930      if (integer_zerop (arg1)
10931	  && tree_expr_nonzero_p (arg0))
10932        {
	  tree res = constant_boolean_node (code == NE_EXPR, type);
10934	  return omit_one_operand (type, res, arg0);
10935	}
10936      return NULL_TREE;
10937
10938    case LT_EXPR:
10939    case GT_EXPR:
10940    case LE_EXPR:
10941    case GE_EXPR:
10942      tem = fold_comparison (code, type, op0, op1);
10943      if (tem != NULL_TREE)
10944	return tem;
10945
10946      /* Transform comparisons of the form X +- C CMP X.  */
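      /* For example, for signed x with undefined overflow, x + 1 > x
         folds to true and x - 1 > x folds to false; each case may emit
         a -Wstrict-overflow warning through fold_overflow_warning.  */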
10947      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10948	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10949	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10950	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10951	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10952		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10953	{
10954	  tree arg01 = TREE_OPERAND (arg0, 1);
10955	  enum tree_code code0 = TREE_CODE (arg0);
10956	  int is_positive;
10957
10958	  if (TREE_CODE (arg01) == REAL_CST)
10959	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10960	  else
10961	    is_positive = tree_int_cst_sgn (arg01);
10962
10963	  /* (X - c) > X becomes false.  */
10964	  if (code == GT_EXPR
10965	      && ((code0 == MINUS_EXPR && is_positive >= 0)
10966		  || (code0 == PLUS_EXPR && is_positive <= 0)))
10967	    {
10968	      if (TREE_CODE (arg01) == INTEGER_CST
10969		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10970		fold_overflow_warning (("assuming signed overflow does not "
10971					"occur when assuming that (X - c) > X "
10972					"is always false"),
10973				       WARN_STRICT_OVERFLOW_ALL);
10974	      return constant_boolean_node (0, type);
10975	    }
10976
10977	  /* Likewise (X + c) < X becomes false.  */
10978	  if (code == LT_EXPR
10979	      && ((code0 == PLUS_EXPR && is_positive >= 0)
10980		  || (code0 == MINUS_EXPR && is_positive <= 0)))
10981	    {
10982	      if (TREE_CODE (arg01) == INTEGER_CST
10983		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10984		fold_overflow_warning (("assuming signed overflow does not "
10985					"occur when assuming that "
10986					"(X + c) < X is always false"),
10987				       WARN_STRICT_OVERFLOW_ALL);
10988	      return constant_boolean_node (0, type);
10989	    }
10990
10991	  /* Convert (X - c) <= X to true.  */
10992	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10993	      && code == LE_EXPR
10994	      && ((code0 == MINUS_EXPR && is_positive >= 0)
10995		  || (code0 == PLUS_EXPR && is_positive <= 0)))
10996	    {
10997	      if (TREE_CODE (arg01) == INTEGER_CST
10998		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10999		fold_overflow_warning (("assuming signed overflow does not "
11000					"occur when assuming that "
11001					"(X - c) <= X is always true"),
11002				       WARN_STRICT_OVERFLOW_ALL);
11003	      return constant_boolean_node (1, type);
11004	    }
11005
11006	  /* Convert (X + c) >= X to true.  */
11007	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11008	      && code == GE_EXPR
11009	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11010		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11011	    {
11012	      if (TREE_CODE (arg01) == INTEGER_CST
11013		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11014		fold_overflow_warning (("assuming signed overflow does not "
11015					"occur when assuming that "
11016					"(X + c) >= X is always true"),
11017				       WARN_STRICT_OVERFLOW_ALL);
11018	      return constant_boolean_node (1, type);
11019	    }
11020
11021	  if (TREE_CODE (arg01) == INTEGER_CST)
11022	    {
11023	      /* Convert X + c > X and X - c < X to true for integers.  */
11024	      if (code == GT_EXPR
11025	          && ((code0 == PLUS_EXPR && is_positive > 0)
11026		      || (code0 == MINUS_EXPR && is_positive < 0)))
11027		{
11028		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029		    fold_overflow_warning (("assuming signed overflow does "
11030					    "not occur when assuming that "
11031					    "(X + c) > X is always true"),
11032					   WARN_STRICT_OVERFLOW_ALL);
11033		  return constant_boolean_node (1, type);
11034		}
11035
11036	      if (code == LT_EXPR
11037	          && ((code0 == MINUS_EXPR && is_positive > 0)
11038		      || (code0 == PLUS_EXPR && is_positive < 0)))
11039		{
11040		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11041		    fold_overflow_warning (("assuming signed overflow does "
11042					    "not occur when assuming that "
11043					    "(X - c) < X is always true"),
11044					   WARN_STRICT_OVERFLOW_ALL);
11045		  return constant_boolean_node (1, type);
11046		}
11047
11048	      /* Convert X + c <= X and X - c >= X to false for integers.  */
11049	      if (code == LE_EXPR
11050	          && ((code0 == PLUS_EXPR && is_positive > 0)
11051		      || (code0 == MINUS_EXPR && is_positive < 0)))
11052		{
11053		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11054		    fold_overflow_warning (("assuming signed overflow does "
11055					    "not occur when assuming that "
11056					    "(X + c) <= X is always false"),
11057					   WARN_STRICT_OVERFLOW_ALL);
11058		  return constant_boolean_node (0, type);
11059		}
11060
11061	      if (code == GE_EXPR
11062	          && ((code0 == MINUS_EXPR && is_positive > 0)
11063		      || (code0 == PLUS_EXPR && is_positive < 0)))
11064		{
11065		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11066		    fold_overflow_warning (("assuming signed overflow does "
11067					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
11069					   WARN_STRICT_OVERFLOW_ALL);
11070		  return constant_boolean_node (0, type);
11071		}
11072	    }
11073	}
11074
11075      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11076	 This transformation affects the cases which are handled in later
11077	 optimizations involving comparisons with non-negative constants.  */
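      /* For example, x >= 5 becomes x > 4, and x < 5 becomes x <= 4.  */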
11078      if (TREE_CODE (arg1) == INTEGER_CST
11079	  && TREE_CODE (arg0) != INTEGER_CST
11080	  && tree_int_cst_sgn (arg1) > 0)
11081	{
11082	  if (code == GE_EXPR)
11083	    {
11084	      arg1 = const_binop (MINUS_EXPR, arg1,
11085			          build_int_cst (TREE_TYPE (arg1), 1), 0);
11086	      return fold_build2 (GT_EXPR, type, arg0,
11087				  fold_convert (TREE_TYPE (arg0), arg1));
11088	    }
11089	  if (code == LT_EXPR)
11090	    {
11091	      arg1 = const_binop (MINUS_EXPR, arg1,
11092			          build_int_cst (TREE_TYPE (arg1), 1), 0);
11093	      return fold_build2 (LE_EXPR, type, arg0,
11094				  fold_convert (TREE_TYPE (arg0), arg1));
11095	    }
11096	}
11097
11098      /* Comparisons with the highest or lowest possible integer of
11099	 the specified size will have known values.  */
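      /* For example, for an 8-bit unsigned operand, x > 255 folds to
         false and x <= 255 folds to true.  */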
11100      {
11101	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11102
11103	if (TREE_CODE (arg1) == INTEGER_CST
11104	    && ! TREE_CONSTANT_OVERFLOW (arg1)
11105	    && width <= 2 * HOST_BITS_PER_WIDE_INT
11106	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11107		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
11108	  {
11109	    HOST_WIDE_INT signed_max_hi;
11110	    unsigned HOST_WIDE_INT signed_max_lo;
11111	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11112
11113	    if (width <= HOST_BITS_PER_WIDE_INT)
11114	      {
11115		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11116				- 1;
11117		signed_max_hi = 0;
11118		max_hi = 0;
11119
11120		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11121		  {
11122		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11123		    min_lo = 0;
11124		    min_hi = 0;
11125		  }
11126		else
11127		  {
11128		    max_lo = signed_max_lo;
11129		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11130		    min_hi = -1;
11131		  }
11132	      }
11133	    else
11134	      {
11135		width -= HOST_BITS_PER_WIDE_INT;
11136		signed_max_lo = -1;
11137		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11138				- 1;
11139		max_lo = -1;
11140		min_lo = 0;
11141
11142		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11143		  {
11144		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11145		    min_hi = 0;
11146		  }
11147		else
11148		  {
11149		    max_hi = signed_max_hi;
11150		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11151		  }
11152	      }
11153
11154	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11155		&& TREE_INT_CST_LOW (arg1) == max_lo)
11156	      switch (code)
11157		{
11158		case GT_EXPR:
11159		  return omit_one_operand (type, integer_zero_node, arg0);
11160
11161		case GE_EXPR:
11162		  return fold_build2 (EQ_EXPR, type, op0, op1);
11163
11164		case LE_EXPR:
11165		  return omit_one_operand (type, integer_one_node, arg0);
11166
11167		case LT_EXPR:
11168		  return fold_build2 (NE_EXPR, type, op0, op1);
11169
11170		/* The GE_EXPR and LT_EXPR cases above are not normally
11171		   reached because of previous transformations.  */
11172
11173		default:
11174		  break;
11175		}
11176	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11177		     == max_hi
11178		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11179	      switch (code)
11180		{
11181		case GT_EXPR:
11182		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11183		  return fold_build2 (EQ_EXPR, type,
11184				      fold_convert (TREE_TYPE (arg1), arg0),
11185				      arg1);
11186		case LE_EXPR:
11187		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11188		  return fold_build2 (NE_EXPR, type,
11189				      fold_convert (TREE_TYPE (arg1), arg0),
11190				      arg1);
11191		default:
11192		  break;
11193		}
11194	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11195		     == min_hi
11196		     && TREE_INT_CST_LOW (arg1) == min_lo)
11197	      switch (code)
11198		{
11199		case LT_EXPR:
11200		  return omit_one_operand (type, integer_zero_node, arg0);
11201
11202		case LE_EXPR:
11203		  return fold_build2 (EQ_EXPR, type, op0, op1);
11204
11205		case GE_EXPR:
11206		  return omit_one_operand (type, integer_one_node, arg0);
11207
11208		case GT_EXPR:
11209		  return fold_build2 (NE_EXPR, type, op0, op1);
11210
11211		default:
11212		  break;
11213		}
11214	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11215		     == min_hi
11216		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11217	      switch (code)
11218		{
11219		case GE_EXPR:
11220		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11221		  return fold_build2 (NE_EXPR, type,
11222				      fold_convert (TREE_TYPE (arg1), arg0),
11223				      arg1);
11224		case LT_EXPR:
11225		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11226		  return fold_build2 (EQ_EXPR, type,
11227				      fold_convert (TREE_TYPE (arg1), arg0),
11228				      arg1);
11229		default:
11230		  break;
11231		}
11232
11233	    else if (!in_gimple_form
11234		     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11235		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
11236		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
11237		     /* signed_type does not work on pointer types.  */
11238		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11239	      {
11240		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous transformations.  */
11242		if (code == LE_EXPR || code == GT_EXPR)
11243		  {
11244		    tree st;
11245		    st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11246		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11247					type, fold_convert (st, arg0),
11248					build_int_cst (st, 0));
11249		  }
11250	      }
11251	  }
11252      }
11253
11254      /* If we are comparing an ABS_EXPR with a constant, we can
11255	 convert all the cases into explicit comparisons, but they may
11256	 well not be faster than doing the ABS and one comparison.
11257	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11258	 and a comparison, and is probably faster.  */
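      /* For example, ABS (x) <= 5 becomes x >= -5 && x <= 5.  */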
11259      if (code == LE_EXPR
11260	  && TREE_CODE (arg1) == INTEGER_CST
11261	  && TREE_CODE (arg0) == ABS_EXPR
11262	  && ! TREE_SIDE_EFFECTS (arg0)
11263	  && (0 != (tem = negate_expr (arg1)))
11264	  && TREE_CODE (tem) == INTEGER_CST
11265	  && ! TREE_CONSTANT_OVERFLOW (tem))
11266	return fold_build2 (TRUTH_ANDIF_EXPR, type,
11267			    build2 (GE_EXPR, type,
11268				    TREE_OPERAND (arg0, 0), tem),
11269			    build2 (LE_EXPR, type,
11270				    TREE_OPERAND (arg0, 0), arg1));
11271
11272      /* Convert ABS_EXPR<x> >= 0 to true.  */
11273      strict_overflow_p = false;
11274      if (code == GE_EXPR
11275	  && (integer_zerop (arg1)
11276	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11277		  && real_zerop (arg1)))
11278	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11279	{
11280	  if (strict_overflow_p)
11281	    fold_overflow_warning (("assuming signed overflow does not occur "
11282				    "when simplifying comparison of "
11283				    "absolute value and zero"),
11284				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11285	  return omit_one_operand (type, integer_one_node, arg0);
11286	}
11287
11288      /* Convert ABS_EXPR<x> < 0 to false.  */
11289      strict_overflow_p = false;
11290      if (code == LT_EXPR
11291	  && (integer_zerop (arg1) || real_zerop (arg1))
11292	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11293	{
11294	  if (strict_overflow_p)
11295	    fold_overflow_warning (("assuming signed overflow does not occur "
11296				    "when simplifying comparison of "
11297				    "absolute value and zero"),
11298				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11299	  return omit_one_operand (type, integer_zero_node, arg0);
11300	}
11301
11302      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11303	 and similarly for >= into !=.  */
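      /* For example, for unsigned x, x < (1 << y) becomes (x >> y) == 0
         and x >= (1 << y) becomes (x >> y) != 0.  */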
11304      if ((code == LT_EXPR || code == GE_EXPR)
11305	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11306	  && TREE_CODE (arg1) == LSHIFT_EXPR
11307	  && integer_onep (TREE_OPERAND (arg1, 0)))
11308	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11309		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11310			       TREE_OPERAND (arg1, 1)),
11311		       build_int_cst (TREE_TYPE (arg0), 0));
11312
11313      if ((code == LT_EXPR || code == GE_EXPR)
11314	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11315	  && (TREE_CODE (arg1) == NOP_EXPR
11316	      || TREE_CODE (arg1) == CONVERT_EXPR)
11317	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11318	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11319	return
11320	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11321		  fold_convert (TREE_TYPE (arg0),
11322				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11323					TREE_OPERAND (TREE_OPERAND (arg1, 0),
11324						      1))),
11325		  build_int_cst (TREE_TYPE (arg0), 0));
11326
11327      return NULL_TREE;
11328
11329    case UNORDERED_EXPR:
11330    case ORDERED_EXPR:
11331    case UNLT_EXPR:
11332    case UNLE_EXPR:
11333    case UNGT_EXPR:
11334    case UNGE_EXPR:
11335    case UNEQ_EXPR:
11336    case LTGT_EXPR:
11337      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11338	{
11339	  t1 = fold_relational_const (code, type, arg0, arg1);
11340	  if (t1 != NULL_TREE)
11341	    return t1;
11342	}
11343
11344      /* If the first operand is NaN, the result is constant.  */
11345      if (TREE_CODE (arg0) == REAL_CST
11346	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11347	  && (code != LTGT_EXPR || ! flag_trapping_math))
11348	{
11349	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11350	       ? integer_zero_node
11351	       : integer_one_node;
11352	  return omit_one_operand (type, t1, arg1);
11353	}
11354
11355      /* If the second operand is NaN, the result is constant.  */
11356      if (TREE_CODE (arg1) == REAL_CST
11357	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11358	  && (code != LTGT_EXPR || ! flag_trapping_math))
11359	{
11360	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11361	       ? integer_zero_node
11362	       : integer_one_node;
11363	  return omit_one_operand (type, t1, arg0);
11364	}
11365
11366      /* Simplify unordered comparison of something with itself.  */
11367      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11368	  && operand_equal_p (arg0, arg1, 0))
11369	return constant_boolean_node (1, type);
11370
11371      if (code == LTGT_EXPR
11372	  && !flag_trapping_math
11373	  && operand_equal_p (arg0, arg1, 0))
11374	return constant_boolean_node (0, type);
11375
11376      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11377      {
11378	tree targ0 = strip_float_extensions (arg0);
11379	tree targ1 = strip_float_extensions (arg1);
11380	tree newtype = TREE_TYPE (targ0);
11381
11382	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11383	  newtype = TREE_TYPE (targ1);
11384
11385	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11386	  return fold_build2 (code, type, fold_convert (newtype, targ0),
11387			      fold_convert (newtype, targ1));
11388      }
11389
11390      return NULL_TREE;
11391
11392    case COMPOUND_EXPR:
11393      /* When pedantic, a compound expression can be neither an lvalue
11394	 nor an integer constant expression.  */
11395      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11396	return NULL_TREE;
      /* Don't let (0, 0) be a null pointer constant.  */
11398      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11399				 : fold_convert (type, arg1);
11400      return pedantic_non_lvalue (tem);
11401
11402    case COMPLEX_EXPR:
11403      if ((TREE_CODE (arg0) == REAL_CST
11404	   && TREE_CODE (arg1) == REAL_CST)
11405	  || (TREE_CODE (arg0) == INTEGER_CST
11406	      && TREE_CODE (arg1) == INTEGER_CST))
11407	return build_complex (type, arg0, arg1);
11408      return NULL_TREE;
11409
11410    case ASSERT_EXPR:
11411      /* An ASSERT_EXPR should never be passed to fold_binary.  */
11412      gcc_unreachable ();
11413
11414    default:
11415      return NULL_TREE;
11416    } /* switch (code) */
11417}
11418
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR, otherwise return NULL_TREE.  Do not walk into the
   sub-tree of a GOTO_EXPR.  */
11422
11423static tree
11424contains_label_1 (tree *tp,
11425                  int *walk_subtrees,
11426                  void *data ATTRIBUTE_UNUSED)
11427{
11428  switch (TREE_CODE (*tp))
11429    {
11430    case LABEL_EXPR:
11431      return *tp;
11432    case GOTO_EXPR:
11433      *walk_subtrees = 0;
      /* ... fall through ...  */
11435    default:
11436      return NULL_TREE;
11437    }
11438}
11439
/* Check whether the sub-tree ST contains a label (LABEL_EXPR) which is
   accessible from outside the sub-tree.  Return true if such a label
   is found, false otherwise.  */
11443
11444static bool
11445contains_label_p (tree st)
11446{
11447  return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11448}
11449
11450/* Fold a ternary expression of code CODE and type TYPE with operands
11451   OP0, OP1, and OP2.  Return the folded expression if folding is
11452   successful.  Otherwise, return NULL_TREE.  */
11453
11454tree
11455fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11456{
11457  tree tem;
11458  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11459  enum tree_code_class kind = TREE_CODE_CLASS (code);
11460
11461  gcc_assert (IS_EXPR_CODE_CLASS (kind)
11462	      && TREE_CODE_LENGTH (code) == 3);
11463
11464  /* Strip any conversions that don't change the mode.  This is safe
11465     for every expression, except for a comparison expression because
11466     its signedness is derived from its operands.  So, in the latter
11467     case, only strip conversions that don't change the signedness.
11468
11469     Note that this is done as an internal manipulation within the
11470     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that comes out of the constant folder.  */
11474  if (op0)
11475    {
11476      arg0 = op0;
11477      STRIP_NOPS (arg0);
11478    }
11479
11480  if (op1)
11481    {
11482      arg1 = op1;
11483      STRIP_NOPS (arg1);
11484    }
11485
11486  switch (code)
11487    {
11488    case COMPONENT_REF:
11489      if (TREE_CODE (arg0) == CONSTRUCTOR
11490	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11491	{
11492	  unsigned HOST_WIDE_INT idx;
11493	  tree field, value;
11494	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11495	    if (field == arg1)
11496	      return value;
11497	}
11498      return NULL_TREE;
11499
11500    case COND_EXPR:
11501      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11502	 so all simple results must be passed through pedantic_non_lvalue.  */
11503      if (TREE_CODE (arg0) == INTEGER_CST)
11504	{
11505	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11506	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Also avoid throwing away an operand that contains a label.  */
11511          if ((!TREE_SIDE_EFFECTS (unused_op)
11512               || !contains_label_p (unused_op))
11513              && (! VOID_TYPE_P (TREE_TYPE (tem))
11514                  || VOID_TYPE_P (type)))
11515	    return pedantic_non_lvalue (tem);
11516	  return NULL_TREE;
11517	}
11518      if (operand_equal_p (arg1, op2, 0))
11519	return pedantic_omit_one_operand (type, arg1, arg0);
11520
11521      /* If we have A op B ? A : C, we may be able to convert this to a
11522	 simpler expression, depending on the operation and the values
11523	 of B and C.  Signed zeros prevent all of these transformations,
11524	 for reasons given above each one.
11525
11526         Also try swapping the arguments and inverting the conditional.  */
11527      if (COMPARISON_CLASS_P (arg0)
11528	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11529					     arg1, TREE_OPERAND (arg0, 1))
11530	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11531	{
11532	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11533	  if (tem)
11534	    return tem;
11535	}
11536
11537      if (COMPARISON_CLASS_P (arg0)
11538	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11539					     op2,
11540					     TREE_OPERAND (arg0, 1))
11541	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11542	{
11543	  tem = fold_truth_not_expr (arg0);
11544	  if (tem && COMPARISON_CLASS_P (tem))
11545	    {
11546	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11547	      if (tem)
11548		return tem;
11549	    }
11550	}
11551
11552      /* If the second operand is simpler than the third, swap them
11553	 since that produces better jump optimization results.  */
11554      if (truth_value_p (TREE_CODE (arg0))
11555	  && tree_swap_operands_p (op1, op2, false))
11556	{
11557	  /* See if this can be inverted.  If it can't, possibly because
11558	     it was a floating-point inequality comparison, don't do
11559	     anything.  */
11560	  tem = fold_truth_not_expr (arg0);
11561	  if (tem)
11562	    return fold_build3 (code, type, tem, op2, op1);
11563	}
11564
11565      /* Convert A ? 1 : 0 to simply A.  */
11566      if (integer_onep (op1)
11567	  && integer_zerop (op2)
11568	  /* If we try to convert OP0 to our type, the
11569	     call to fold will try to move the conversion inside
11570	     a COND, which will recurse.  In that case, the COND_EXPR
11571	     is probably the best choice, so leave it alone.  */
11572	  && type == TREE_TYPE (arg0))
11573	return pedantic_non_lvalue (arg0);
11574
11575      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11576	 over COND_EXPR in cases such as floating point comparisons.  */
11577      if (integer_zerop (op1)
11578	  && integer_onep (op2)
11579	  && truth_value_p (TREE_CODE (arg0)))
11580	return pedantic_non_lvalue (fold_convert (type,
11581						  invert_truthvalue (arg0)));
11582
11583      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
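      /* For example, for a 32-bit int x, x < 0 ? 0x80000000 : 0 becomes
         x & 0x80000000, modulo the precision handling below.  */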
11584      if (TREE_CODE (arg0) == LT_EXPR
11585	  && integer_zerop (TREE_OPERAND (arg0, 1))
11586	  && integer_zerop (op2)
11587	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11588	{
11589	  /* sign_bit_p only checks ARG1 bits within A's precision.
11590	     If <sign bit of A> has wider type than A, bits outside
11591	     of A's precision in <sign bit of A> need to be checked.
11592	     If they are all 0, this optimization needs to be done
	     in unsigned A's type; if they are all 1, in signed A's type;
	     otherwise this can't be done.  */
11595	  if (TYPE_PRECISION (TREE_TYPE (tem))
11596	      < TYPE_PRECISION (TREE_TYPE (arg1))
11597	      && TYPE_PRECISION (TREE_TYPE (tem))
11598		 < TYPE_PRECISION (type))
11599	    {
11600	      unsigned HOST_WIDE_INT mask_lo;
11601	      HOST_WIDE_INT mask_hi;
11602	      int inner_width, outer_width;
11603	      tree tem_type;
11604
11605	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11606	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11607	      if (outer_width > TYPE_PRECISION (type))
11608		outer_width = TYPE_PRECISION (type);
11609
11610	      if (outer_width > HOST_BITS_PER_WIDE_INT)
11611		{
11612		  mask_hi = ((unsigned HOST_WIDE_INT) -1
11613			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11614		  mask_lo = -1;
11615		}
11616	      else
11617		{
11618		  mask_hi = 0;
11619		  mask_lo = ((unsigned HOST_WIDE_INT) -1
11620			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
11621		}
11622	      if (inner_width > HOST_BITS_PER_WIDE_INT)
11623		{
11624		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11625			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
11626		  mask_lo = 0;
11627		}
11628	      else
11629		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11630			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
11631
11632	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11633		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11634		{
11635		  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11636		  tem = fold_convert (tem_type, tem);
11637		}
11638	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11639		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11640		{
11641		  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11642		  tem = fold_convert (tem_type, tem);
11643		}
11644	      else
11645		tem = NULL;
11646	    }
11647
11648	  if (tem)
11649	    return fold_convert (type,
11650				 fold_build2 (BIT_AND_EXPR,
11651					      TREE_TYPE (tem), tem,
11652					      fold_convert (TREE_TYPE (tem),
11653							    arg1)));
11654	}
11655
11656      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11657	 already handled above.  */
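      /* For example, ((a >> 3) & 1) ? 8 : 0 is simply a & 8.  */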
11658      if (TREE_CODE (arg0) == BIT_AND_EXPR
11659	  && integer_onep (TREE_OPERAND (arg0, 1))
11660	  && integer_zerop (op2)
11661	  && integer_pow2p (arg1))
11662	{
11663	  tree tem = TREE_OPERAND (arg0, 0);
11664	  STRIP_NOPS (tem);
11665	  if (TREE_CODE (tem) == RSHIFT_EXPR
11666              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11667              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11668	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11669	    return fold_build2 (BIT_AND_EXPR, type,
11670				TREE_OPERAND (tem, 0), arg1);
11671	}
11672
11673      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11674	 is probably obsolete because the first operand should be a
11675	 truth value (that's why we have the two cases above), but let's
11676	 leave it in until we can confirm this for all front-ends.  */
11677      if (integer_zerop (op2)
11678	  && TREE_CODE (arg0) == NE_EXPR
11679	  && integer_zerop (TREE_OPERAND (arg0, 1))
11680	  && integer_pow2p (arg1)
11681	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11682	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11683			      arg1, OEP_ONLY_CONST))
11684	return pedantic_non_lvalue (fold_convert (type,
11685						  TREE_OPERAND (arg0, 0)));
11686
11687      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
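      /* For example, (x > 0) ? (y < 3) : 0 becomes x > 0 && y < 3.  */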
11688      if (integer_zerop (op2)
11689	  && truth_value_p (TREE_CODE (arg0))
11690	  && truth_value_p (TREE_CODE (arg1)))
11691	return fold_build2 (TRUTH_ANDIF_EXPR, type,
11692			    fold_convert (type, arg0),
11693			    arg1);
11694
11695      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11696      if (integer_onep (op2)
11697	  && truth_value_p (TREE_CODE (arg0))
11698	  && truth_value_p (TREE_CODE (arg1)))
11699	{
11700	  /* Only perform transformation if ARG0 is easily inverted.  */
11701	  tem = fold_truth_not_expr (arg0);
11702	  if (tem)
11703	    return fold_build2 (TRUTH_ORIF_EXPR, type,
11704				fold_convert (type, tem),
11705				arg1);
11706	}
11707
11708      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11709      if (integer_zerop (arg1)
11710	  && truth_value_p (TREE_CODE (arg0))
11711	  && truth_value_p (TREE_CODE (op2)))
11712	{
11713	  /* Only perform transformation if ARG0 is easily inverted.  */
11714	  tem = fold_truth_not_expr (arg0);
11715	  if (tem)
11716	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
11717				fold_convert (type, tem),
11718				op2);
11719	}
11720
11721      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11722      if (integer_onep (arg1)
11723	  && truth_value_p (TREE_CODE (arg0))
11724	  && truth_value_p (TREE_CODE (op2)))
11725	return fold_build2 (TRUTH_ORIF_EXPR, type,
11726			    fold_convert (type, arg0),
11727			    op2);
11728
11729      return NULL_TREE;
11730
11731    case CALL_EXPR:
11732      /* Check for a built-in function.  */
11733      if (TREE_CODE (op0) == ADDR_EXPR
11734	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11735	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11736	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11737      return NULL_TREE;
11738
11739    case BIT_FIELD_REF:
11740      if (TREE_CODE (arg0) == VECTOR_CST
11741	  && type == TREE_TYPE (TREE_TYPE (arg0))
11742	  && host_integerp (arg1, 1)
11743	  && host_integerp (op2, 1))
11744	{
11745	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11746	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11747
11748	  if (width != 0
11749	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11750	      && (idx % width) == 0
11751	      && (idx = idx / width)
11752		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11753	    {
11754	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
11755	      while (idx-- > 0 && elements)
11756		elements = TREE_CHAIN (elements);
11757	      if (elements)
11758		return TREE_VALUE (elements);
11759	      else
11760		return fold_convert (type, integer_zero_node);
11761	    }
11762	}
11763      return NULL_TREE;
11764
11765    default:
11766      return NULL_TREE;
11767    } /* switch (code) */
11768}
11769
11770/* Perform constant folding and related simplification of EXPR.
11771   The related simplifications include x*1 => x, x*0 => 0, etc.,
11772   and application of the associative law.
11773   NOP_EXPR conversions may be removed freely (as long as we
11774   are careful not to change the type of the overall expression).
11775   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11776   but we can constant-fold them if they have constant operands.  */
11777
11778#ifdef ENABLE_FOLD_CHECKING
11779# define fold(x) fold_1 (x)
11780static tree fold_1 (tree);
11781static
11782#endif
11783tree
11784fold (tree expr)
11785{
11786  const tree t = expr;
11787  enum tree_code code = TREE_CODE (t);
11788  enum tree_code_class kind = TREE_CODE_CLASS (code);
11789  tree tem;
11790
11791  /* Return right away if a constant.  */
11792  if (kind == tcc_constant)
11793    return t;
11794
11795  if (IS_EXPR_CODE_CLASS (kind))
11796    {
11797      tree type = TREE_TYPE (t);
11798      tree op0, op1, op2;
11799
11800      switch (TREE_CODE_LENGTH (code))
11801	{
11802	case 1:
11803	  op0 = TREE_OPERAND (t, 0);
11804	  tem = fold_unary (code, type, op0);
11805	  return tem ? tem : expr;
11806	case 2:
11807	  op0 = TREE_OPERAND (t, 0);
11808	  op1 = TREE_OPERAND (t, 1);
11809	  tem = fold_binary (code, type, op0, op1);
11810	  return tem ? tem : expr;
11811	case 3:
11812	  op0 = TREE_OPERAND (t, 0);
11813	  op1 = TREE_OPERAND (t, 1);
11814	  op2 = TREE_OPERAND (t, 2);
11815	  tem = fold_ternary (code, type, op0, op1, op2);
11816	  return tem ? tem : expr;
11817	default:
11818	  break;
11819	}
11820    }
11821
11822  switch (code)
11823    {
11824    case CONST_DECL:
11825      return fold (DECL_INITIAL (t));
11826
11827    default:
11828      return t;
11829    } /* switch (code) */
11830}
11831
11832#ifdef ENABLE_FOLD_CHECKING
11833#undef fold
11834
11835static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11836static void fold_check_failed (tree, tree);
11837void print_fold_checksum (tree);
11838
/* When --enable-checking=fold is in effect, compute a digest of EXPR
   before and after the actual fold call, to verify that fold did not
   accidentally change the original EXPR.  */
11842
11843tree
11844fold (tree expr)
11845{
11846  tree ret;
11847  struct md5_ctx ctx;
11848  unsigned char checksum_before[16], checksum_after[16];
11849  htab_t ht;
11850
11851  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11852  md5_init_ctx (&ctx);
11853  fold_checksum_tree (expr, &ctx, ht);
11854  md5_finish_ctx (&ctx, checksum_before);
11855  htab_empty (ht);
11856
11857  ret = fold_1 (expr);
11858
11859  md5_init_ctx (&ctx);
11860  fold_checksum_tree (expr, &ctx, ht);
11861  md5_finish_ctx (&ctx, checksum_after);
11862  htab_delete (ht);
11863
11864  if (memcmp (checksum_before, checksum_after, 16))
11865    fold_check_failed (expr, ret);
11866
11867  return ret;
11868}
11869
11870void
11871print_fold_checksum (tree expr)
11872{
11873  struct md5_ctx ctx;
11874  unsigned char checksum[16], cnt;
11875  htab_t ht;
11876
11877  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11878  md5_init_ctx (&ctx);
11879  fold_checksum_tree (expr, &ctx, ht);
11880  md5_finish_ctx (&ctx, checksum);
11881  htab_delete (ht);
11882  for (cnt = 0; cnt < 16; ++cnt)
11883    fprintf (stderr, "%02x", checksum[cnt]);
11884  putc ('\n', stderr);
11885}
11886
11887static void
11888fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11889{
11890  internal_error ("fold check: original tree changed by fold");
11891}
11892
11893static void
11894fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11895{
11896  void **slot;
11897  enum tree_code code;
11898  struct tree_function_decl buf;
11899  int i, len;
11900
11901recursive_label:
11902
11903  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11904	       <= sizeof (struct tree_function_decl))
11905	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11906  if (expr == NULL)
11907    return;
11908  slot = htab_find_slot (ht, expr, INSERT);
11909  if (*slot != NULL)
11910    return;
11911  *slot = expr;
11912  code = TREE_CODE (expr);
11913  if (TREE_CODE_CLASS (code) == tcc_declaration
11914      && DECL_ASSEMBLER_NAME_SET_P (expr))
11915    {
11916      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
11917      memcpy ((char *) &buf, expr, tree_size (expr));
11918      expr = (tree) &buf;
11919      SET_DECL_ASSEMBLER_NAME (expr, NULL);
11920    }
11921  else if (TREE_CODE_CLASS (code) == tcc_type
11922	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11923	       || TYPE_CACHED_VALUES_P (expr)
11924	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11925    {
11926      /* Allow these fields to be modified.  */
11927      memcpy ((char *) &buf, expr, tree_size (expr));
11928      expr = (tree) &buf;
11929      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11930      TYPE_POINTER_TO (expr) = NULL;
11931      TYPE_REFERENCE_TO (expr) = NULL;
11932      if (TYPE_CACHED_VALUES_P (expr))
11933	{
11934	  TYPE_CACHED_VALUES_P (expr) = 0;
11935	  TYPE_CACHED_VALUES (expr) = NULL;
11936	}
11937    }
11938  md5_process_bytes (expr, tree_size (expr), ctx);
11939  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11940  if (TREE_CODE_CLASS (code) != tcc_type
11941      && TREE_CODE_CLASS (code) != tcc_declaration
11942      && code != TREE_LIST)
11943    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11944  switch (TREE_CODE_CLASS (code))
11945    {
11946    case tcc_constant:
11947      switch (code)
11948	{
11949	case STRING_CST:
11950	  md5_process_bytes (TREE_STRING_POINTER (expr),
11951			     TREE_STRING_LENGTH (expr), ctx);
11952	  break;
11953	case COMPLEX_CST:
11954	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11955	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11956	  break;
11957	case VECTOR_CST:
11958	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11959	  break;
11960	default:
11961	  break;
11962	}
11963      break;
11964    case tcc_exceptional:
11965      switch (code)
11966	{
11967	case TREE_LIST:
11968	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11969	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11970	  expr = TREE_CHAIN (expr);
11971	  goto recursive_label;
11972	  break;
11973	case TREE_VEC:
11974	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11975	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11976	  break;
11977	default:
11978	  break;
11979	}
11980      break;
11981    case tcc_expression:
11982    case tcc_reference:
11983    case tcc_comparison:
11984    case tcc_unary:
11985    case tcc_binary:
11986    case tcc_statement:
11987      len = TREE_CODE_LENGTH (code);
11988      for (i = 0; i < len; ++i)
11989	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11990      break;
11991    case tcc_declaration:
11992      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11993      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11994      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11995	{
11996	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11997	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11998	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11999	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12000	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12001	}
12002      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12003	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12004
12005      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12006	{
12007	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12008	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12009	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12010	}
12011      break;
12012    case tcc_type:
12013      if (TREE_CODE (expr) == ENUMERAL_TYPE)
12014        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12015      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12016      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12017      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12018      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12019      if (INTEGRAL_TYPE_P (expr)
12020          || SCALAR_FLOAT_TYPE_P (expr))
12021	{
12022	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12023	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12024	}
12025      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12026      if (TREE_CODE (expr) == RECORD_TYPE
12027	  || TREE_CODE (expr) == UNION_TYPE
12028	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12029	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12030      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12031      break;
12032    default:
12033      break;
12034    }
12035}
12036
12037#endif
12038
12039/* Fold a unary tree expression with code CODE of type TYPE with an
12040   operand OP0.  Return a folded expression if successful.  Otherwise,
12041   return a tree expression with code CODE of type TYPE with an
12042   operand OP0.  */
12043
12044tree
12045fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12046{
12047  tree tem;
12048#ifdef ENABLE_FOLD_CHECKING
12049  unsigned char checksum_before[16], checksum_after[16];
12050  struct md5_ctx ctx;
12051  htab_t ht;
12052
12053  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12054  md5_init_ctx (&ctx);
12055  fold_checksum_tree (op0, &ctx, ht);
12056  md5_finish_ctx (&ctx, checksum_before);
12057  htab_empty (ht);
12058#endif
12059
12060  tem = fold_unary (code, type, op0);
12061  if (!tem)
12062    tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12063
12064#ifdef ENABLE_FOLD_CHECKING
12065  md5_init_ctx (&ctx);
12066  fold_checksum_tree (op0, &ctx, ht);
12067  md5_finish_ctx (&ctx, checksum_after);
12068  htab_delete (ht);
12069
12070  if (memcmp (checksum_before, checksum_after, 16))
12071    fold_check_failed (op0, tem);
12072#endif
12073  return tem;
12074}
12075
12076/* Fold a binary tree expression with code CODE of type TYPE with
12077   operands OP0 and OP1.  Return a folded expression if successful.
12078   Otherwise, return a tree expression with code CODE of type TYPE
12079   with operands OP0 and OP1.  */
12080
12081tree
12082fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12083		  MEM_STAT_DECL)
12084{
12085  tree tem;
12086#ifdef ENABLE_FOLD_CHECKING
12087  unsigned char checksum_before_op0[16],
12088                checksum_before_op1[16],
12089		checksum_after_op0[16],
12090		checksum_after_op1[16];
12091  struct md5_ctx ctx;
12092  htab_t ht;
12093
12094  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12095  md5_init_ctx (&ctx);
12096  fold_checksum_tree (op0, &ctx, ht);
12097  md5_finish_ctx (&ctx, checksum_before_op0);
12098  htab_empty (ht);
12099
12100  md5_init_ctx (&ctx);
12101  fold_checksum_tree (op1, &ctx, ht);
12102  md5_finish_ctx (&ctx, checksum_before_op1);
12103  htab_empty (ht);
12104#endif
12105
12106  tem = fold_binary (code, type, op0, op1);
12107  if (!tem)
12108    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12109
12110#ifdef ENABLE_FOLD_CHECKING
12111  md5_init_ctx (&ctx);
12112  fold_checksum_tree (op0, &ctx, ht);
12113  md5_finish_ctx (&ctx, checksum_after_op0);
12114  htab_empty (ht);
12115
12116  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12117    fold_check_failed (op0, tem);
12118
12119  md5_init_ctx (&ctx);
12120  fold_checksum_tree (op1, &ctx, ht);
12121  md5_finish_ctx (&ctx, checksum_after_op1);
12122  htab_delete (ht);
12123
12124  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12125    fold_check_failed (op1, tem);
12126#endif
12127  return tem;
12128}
12129
12130/* Fold a ternary tree expression with code CODE of type TYPE with
12131   operands OP0, OP1, and OP2.  Return a folded expression if
12132   successful.  Otherwise, return a tree expression with code CODE of
12133   type TYPE with operands OP0, OP1, and OP2.  */
12134
12135tree
12136fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12137	     MEM_STAT_DECL)
12138{
12139  tree tem;
12140#ifdef ENABLE_FOLD_CHECKING
12141  unsigned char checksum_before_op0[16],
12142                checksum_before_op1[16],
12143                checksum_before_op2[16],
12144		checksum_after_op0[16],
12145		checksum_after_op1[16],
12146		checksum_after_op2[16];
12147  struct md5_ctx ctx;
12148  htab_t ht;
12149
12150  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12151  md5_init_ctx (&ctx);
12152  fold_checksum_tree (op0, &ctx, ht);
12153  md5_finish_ctx (&ctx, checksum_before_op0);
12154  htab_empty (ht);
12155
12156  md5_init_ctx (&ctx);
12157  fold_checksum_tree (op1, &ctx, ht);
12158  md5_finish_ctx (&ctx, checksum_before_op1);
12159  htab_empty (ht);
12160
12161  md5_init_ctx (&ctx);
12162  fold_checksum_tree (op2, &ctx, ht);
12163  md5_finish_ctx (&ctx, checksum_before_op2);
12164  htab_empty (ht);
12165#endif
12166
12167  tem = fold_ternary (code, type, op0, op1, op2);
12168  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12170
12171#ifdef ENABLE_FOLD_CHECKING
12172  md5_init_ctx (&ctx);
12173  fold_checksum_tree (op0, &ctx, ht);
12174  md5_finish_ctx (&ctx, checksum_after_op0);
12175  htab_empty (ht);
12176
12177  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12178    fold_check_failed (op0, tem);
12179
12180  md5_init_ctx (&ctx);
12181  fold_checksum_tree (op1, &ctx, ht);
12182  md5_finish_ctx (&ctx, checksum_after_op1);
12183  htab_empty (ht);
12184
12185  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12186    fold_check_failed (op1, tem);
12187
12188  md5_init_ctx (&ctx);
12189  fold_checksum_tree (op2, &ctx, ht);
12190  md5_finish_ctx (&ctx, checksum_after_op2);
12191  htab_delete (ht);
12192
12193  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12194    fold_check_failed (op2, tem);
12195#endif
12196  return tem;
12197}
12198
/* Perform constant folding and related simplification of an initializer
   expression.  The fold_buildN_initializer functions below behave
   identically to "fold_buildN" but ignore potential run-time traps and
   exceptions that fold must otherwise preserve.  */
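/* For example, fold_build2_initializer (PLUS_EXPR, type, op0, op1) folds
   as if -fno-signaling-nans, -fno-trapping-math, -fno-rounding-math and
   -fno-trapv were in effect: the corresponding flags are saved, cleared
   around the fold, and then restored.  */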
12202
12203#define START_FOLD_INIT \
12204  int saved_signaling_nans = flag_signaling_nans;\
12205  int saved_trapping_math = flag_trapping_math;\
12206  int saved_rounding_math = flag_rounding_math;\
12207  int saved_trapv = flag_trapv;\
12208  int saved_folding_initializer = folding_initializer;\
12209  flag_signaling_nans = 0;\
12210  flag_trapping_math = 0;\
12211  flag_rounding_math = 0;\
12212  flag_trapv = 0;\
12213  folding_initializer = 1;
12214
12215#define END_FOLD_INIT \
12216  flag_signaling_nans = saved_signaling_nans;\
12217  flag_trapping_math = saved_trapping_math;\
12218  flag_rounding_math = saved_rounding_math;\
12219  flag_trapv = saved_trapv;\
12220  folding_initializer = saved_folding_initializer;
12221
12222tree
12223fold_build1_initializer (enum tree_code code, tree type, tree op)
12224{
12225  tree result;
12226  START_FOLD_INIT;
12227
12228  result = fold_build1 (code, type, op);
12229
12230  END_FOLD_INIT;
12231  return result;
12232}
12233
12234tree
12235fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12236{
12237  tree result;
12238  START_FOLD_INIT;
12239
12240  result = fold_build2 (code, type, op0, op1);
12241
12242  END_FOLD_INIT;
12243  return result;
12244}
12245
12246tree
12247fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12248			 tree op2)
12249{
12250  tree result;
12251  START_FOLD_INIT;
12252
12253  result = fold_build3 (code, type, op0, op1, op2);
12254
12255  END_FOLD_INIT;
12256  return result;
12257}
12258
12259#undef START_FOLD_INIT
12260#undef END_FOLD_INIT
12261
/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.
12264
12265   An example of the sort of thing we care about (at this point; this routine
12266   could surely be made more general, and expanded to do what the *_DIV_EXPR's
12267   fold cases do now) is discovering that
12268
12269     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12270
12271   is a multiple of
12272
12273     SAVE_EXPR (J * 8)
12274
12275   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12276
12277   This code also handles discovering that
12278
12279     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12280
12281   is a multiple of 8 so we don't have to worry about dealing with a
12282   possible remainder.
12283
12284   Note that we *look* inside a SAVE_EXPR only to determine how it was
12285   calculated; it is not safe for fold to do much of anything else with the
12286   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12287   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at the
   evaluation time of the original SAVE_EXPR is not necessarily the same as
   at the time the new expression is evaluated.  The only optimization of this
12291   sort that would be valid is changing
12292
12293     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12294
12295   divided by 8 to
12296
12297     SAVE_EXPR (I) * SAVE_EXPR (J)
12298
12299   (where the same SAVE_EXPR (J) is used in the original and the
12300   transformed version).  */
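/* For example, with TOP the tree for J * 8 and BOTTOM the integer
   constant 8, the MULT_EXPR case below returns 1 because the constant
   factor is operand_equal_p to BOTTOM.  */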
12301
12302static int
12303multiple_of_p (tree type, tree top, tree bottom)
12304{
12305  if (operand_equal_p (top, bottom, 0))
12306    return 1;
12307
12308  if (TREE_CODE (type) != INTEGER_TYPE)
12309    return 0;
12310
12311  switch (TREE_CODE (top))
12312    {
12313    case BIT_AND_EXPR:
12314      /* Bitwise and provides a power of two multiple.  If the mask is
12315	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12316      if (!integer_pow2p (bottom))
12317	return 0;
12318      /* FALLTHRU */
12319
12320    case MULT_EXPR:
12321      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12322	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12323
12324    case PLUS_EXPR:
12325    case MINUS_EXPR:
12326      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12327	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12328
12329    case LSHIFT_EXPR:
12330      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12331	{
12332	  tree op1, t1;
12333
12334	  op1 = TREE_OPERAND (top, 1);
12335	  /* const_binop may not detect overflow correctly,
12336	     so check for it explicitly here.  */
12337	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12338	      > TREE_INT_CST_LOW (op1)
12339	      && TREE_INT_CST_HIGH (op1) == 0
12340	      && 0 != (t1 = fold_convert (type,
12341					  const_binop (LSHIFT_EXPR,
12342						       size_one_node,
12343						       op1, 0)))
12344	      && ! TREE_OVERFLOW (t1))
12345	    return multiple_of_p (type, t1, bottom);
12346	}
12347      return 0;
12348
12349    case NOP_EXPR:
      /* Can't handle conversions from a non-integral type or from a
	 wider integral type.  */
12351      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12352	  || (TYPE_PRECISION (type)
12353	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12354	return 0;
12355
12356      /* .. fall through ...  */
12357
12358    case SAVE_EXPR:
12359      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12360
12361    case INTEGER_CST:
12362      if (TREE_CODE (bottom) != INTEGER_CST
12363	  || (TYPE_UNSIGNED (type)
12364	      && (tree_int_cst_sgn (top) < 0
12365		  || tree_int_cst_sgn (bottom) < 0)))
12366	return 0;
12367      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12368					 top, bottom, 0));
12369
12370    default:
12371      return 0;
12372    }
12373}
12374
12375/* Return true if `t' is known to be non-negative.  If the return
12376   value is based on the assumption that signed overflow is undefined,
12377   set *STRICT_OVERFLOW_P to true; otherwise, don't change
12378   *STRICT_OVERFLOW_P.  */
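/* For example, "x * x" with floating-point X is known to be non-negative
   outright, whereas ABS_EXPR of a signed integer is non-negative only
   when signed overflow is undefined, in which case *STRICT_OVERFLOW_P
   is set.  */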
12379
12380int
12381tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12382{
12383  if (t == error_mark_node)
12384    return 0;
12385
12386  if (TYPE_UNSIGNED (TREE_TYPE (t)))
12387    return 1;
12388
12389  switch (TREE_CODE (t))
12390    {
12391    case SSA_NAME:
12392      /* Query VRP to see if it has recorded any information about
12393	 the range of this object.  */
12394      return ssa_name_nonnegative_p (t);
12395
12396    case ABS_EXPR:
12397      /* We can't return 1 if flag_wrapv is set because
12398	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12399      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12400	return 1;
12401      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12402	{
12403	  *strict_overflow_p = true;
12404	  return 1;
12405	}
12406      break;
12407
12408    case INTEGER_CST:
12409      return tree_int_cst_sgn (t) >= 0;
12410
12411    case REAL_CST:
12412      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12413
12414    case PLUS_EXPR:
12415      if (FLOAT_TYPE_P (TREE_TYPE (t)))
12416	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12417					       strict_overflow_p)
12418		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12419						  strict_overflow_p));
12420
12421      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12422	 both unsigned and at least 2 bits shorter than the result.  */
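      /* For example, (int) (unsigned char) a + (int) (unsigned char) b
         is at most 510, which fits in 9 value bits, so the sum is
         non-negative in a 32-bit int (MAX (8, 8) + 1 == 9 < 32).  */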
12423      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12424	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12425	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12426	{
12427	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12428	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12429	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12430	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12431	    {
12432	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12433				       TYPE_PRECISION (inner2)) + 1;
12434	      return prec < TYPE_PRECISION (TREE_TYPE (t));
12435	    }
12436	}
12437      break;
12438
12439    case MULT_EXPR:
12440      if (FLOAT_TYPE_P (TREE_TYPE (t)))
12441	{
12442	  /* x * x for floating point x is always non-negative.  */
12443	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12444	    return 1;
12445	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12446						 strict_overflow_p)
12447		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12448						    strict_overflow_p));
12449	}
12450
      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their precisions is smaller than
	 the precision of the result.  */
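      /* For example, multiplying two unsigned shorts zero-extended to a
         64-bit type yields a product below 2**32, which is non-negative
         since 16 + 16 < 64.  */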
12453      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12454	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12455	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12456	{
12457	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12458	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12459	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12460	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12461	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12462		   < TYPE_PRECISION (TREE_TYPE (t));
12463	}
12464      return 0;
12465
12466    case BIT_AND_EXPR:
12467    case MAX_EXPR:
12468      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12469					     strict_overflow_p)
12470	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12471						strict_overflow_p));
12472
12473    case BIT_IOR_EXPR:
12474    case BIT_XOR_EXPR:
12475    case MIN_EXPR:
12476    case RDIV_EXPR:
12477    case TRUNC_DIV_EXPR:
12478    case CEIL_DIV_EXPR:
12479    case FLOOR_DIV_EXPR:
12480    case ROUND_DIV_EXPR:
12481      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12482					     strict_overflow_p)
12483	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12484						strict_overflow_p));
12485
12486    case TRUNC_MOD_EXPR:
12487    case CEIL_MOD_EXPR:
12488    case FLOOR_MOD_EXPR:
12489    case ROUND_MOD_EXPR:
12490    case SAVE_EXPR:
12491    case NON_LVALUE_EXPR:
12492    case FLOAT_EXPR:
12493    case FIX_TRUNC_EXPR:
12494      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12495					    strict_overflow_p);
12496
12497    case COMPOUND_EXPR:
12498    case MODIFY_EXPR:
12499      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12500					    strict_overflow_p);
12501
12502    case BIND_EXPR:
12503      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12504					    strict_overflow_p);
12505
12506    case COND_EXPR:
12507      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12508					     strict_overflow_p)
12509	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12510						strict_overflow_p));
12511
12512    case NOP_EXPR:
12513      {
12514	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12515	tree outer_type = TREE_TYPE (t);
12516
12517	if (TREE_CODE (outer_type) == REAL_TYPE)
12518	  {
12519	    if (TREE_CODE (inner_type) == REAL_TYPE)
12520	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12521						    strict_overflow_p);
12522	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
12523	      {
12524		if (TYPE_UNSIGNED (inner_type))
12525		  return 1;
12526		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12527						      strict_overflow_p);
12528	      }
12529	  }
12530	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12531	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return (TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		      && TYPE_UNSIGNED (inner_type));
12538	  }
12539      }
12540      break;
12541
12542    case TARGET_EXPR:
12543      {
12544	tree temp = TARGET_EXPR_SLOT (t);
12545	t = TARGET_EXPR_INITIAL (t);
12546
12547	/* If the initializer is non-void, then it's a normal expression
12548	   that will be assigned to the slot.  */
12549	if (!VOID_TYPE_P (t))
12550	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12551
12552	/* Otherwise, the initializer sets the slot in some way.  One common
12553	   way is an assignment statement at the end of the initializer.  */
12554	while (1)
12555	  {
12556	    if (TREE_CODE (t) == BIND_EXPR)
12557	      t = expr_last (BIND_EXPR_BODY (t));
12558	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12559		     || TREE_CODE (t) == TRY_CATCH_EXPR)
12560	      t = expr_last (TREE_OPERAND (t, 0));
12561	    else if (TREE_CODE (t) == STATEMENT_LIST)
12562	      t = expr_last (t);
12563	    else
12564	      break;
12565	  }
12566	if (TREE_CODE (t) == MODIFY_EXPR
12567	    && TREE_OPERAND (t, 0) == temp)
12568	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12569						strict_overflow_p);
12570
12571	return 0;
12572      }
12573
12574    case CALL_EXPR:
12575      {
12576	tree fndecl = get_callee_fndecl (t);
12577	tree arglist = TREE_OPERAND (t, 1);
12578	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12579	  switch (DECL_FUNCTION_CODE (fndecl))
12580	    {
12581	    CASE_FLT_FN (BUILT_IN_ACOS):
12582	    CASE_FLT_FN (BUILT_IN_ACOSH):
12583	    CASE_FLT_FN (BUILT_IN_CABS):
12584	    CASE_FLT_FN (BUILT_IN_COSH):
12585	    CASE_FLT_FN (BUILT_IN_ERFC):
12586	    CASE_FLT_FN (BUILT_IN_EXP):
12587	    CASE_FLT_FN (BUILT_IN_EXP10):
12588	    CASE_FLT_FN (BUILT_IN_EXP2):
12589	    CASE_FLT_FN (BUILT_IN_FABS):
12590	    CASE_FLT_FN (BUILT_IN_FDIM):
12591	    CASE_FLT_FN (BUILT_IN_HYPOT):
12592	    CASE_FLT_FN (BUILT_IN_POW10):
12593	    CASE_INT_FN (BUILT_IN_FFS):
12594	    CASE_INT_FN (BUILT_IN_PARITY):
12595	    CASE_INT_FN (BUILT_IN_POPCOUNT):
12596	    case BUILT_IN_BSWAP32:
12597	    case BUILT_IN_BSWAP64:
12598	      /* Always true.  */
12599	      return 1;
12600
12601	    CASE_FLT_FN (BUILT_IN_SQRT):
12602	      /* sqrt(-0.0) is -0.0.  */
12603	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12604		return 1;
12605	      return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12606						    strict_overflow_p);
12607
12608	    CASE_FLT_FN (BUILT_IN_ASINH):
12609	    CASE_FLT_FN (BUILT_IN_ATAN):
12610	    CASE_FLT_FN (BUILT_IN_ATANH):
12611	    CASE_FLT_FN (BUILT_IN_CBRT):
12612	    CASE_FLT_FN (BUILT_IN_CEIL):
12613	    CASE_FLT_FN (BUILT_IN_ERF):
12614	    CASE_FLT_FN (BUILT_IN_EXPM1):
12615	    CASE_FLT_FN (BUILT_IN_FLOOR):
12616	    CASE_FLT_FN (BUILT_IN_FMOD):
12617	    CASE_FLT_FN (BUILT_IN_FREXP):
12618	    CASE_FLT_FN (BUILT_IN_LCEIL):
12619	    CASE_FLT_FN (BUILT_IN_LDEXP):
12620	    CASE_FLT_FN (BUILT_IN_LFLOOR):
12621	    CASE_FLT_FN (BUILT_IN_LLCEIL):
12622	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
12623	    CASE_FLT_FN (BUILT_IN_LLRINT):
12624	    CASE_FLT_FN (BUILT_IN_LLROUND):
12625	    CASE_FLT_FN (BUILT_IN_LRINT):
12626	    CASE_FLT_FN (BUILT_IN_LROUND):
12627	    CASE_FLT_FN (BUILT_IN_MODF):
12628	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
12629	    CASE_FLT_FN (BUILT_IN_POW):
12630	    CASE_FLT_FN (BUILT_IN_RINT):
12631	    CASE_FLT_FN (BUILT_IN_ROUND):
12632	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
12633	    CASE_FLT_FN (BUILT_IN_SINH):
12634	    CASE_FLT_FN (BUILT_IN_TANH):
12635	    CASE_FLT_FN (BUILT_IN_TRUNC):
12636	      /* True if the 1st argument is nonnegative.  */
12637	      return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12638						    strict_overflow_p);
12639
12640	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if either the 1st or the 2nd argument is nonnegative.  */
12642	      return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12643						     strict_overflow_p)
12644		      || (tree_expr_nonnegative_warnv_p
12645			  (TREE_VALUE (TREE_CHAIN (arglist)),
12646			   strict_overflow_p)));
12647
12648	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if both the 1st and the 2nd arguments are nonnegative.  */
12650	      return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12651						     strict_overflow_p)
12652		      && (tree_expr_nonnegative_warnv_p
12653			  (TREE_VALUE (TREE_CHAIN (arglist)),
12654			   strict_overflow_p)));
12655
12656	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
12657	      /* True if the 2nd argument is nonnegative.  */
12658	      return (tree_expr_nonnegative_warnv_p
12659		      (TREE_VALUE (TREE_CHAIN (arglist)),
12660		       strict_overflow_p));
12661
12662	    default:
12663	      break;
12664	    }
12665      }
12666
12667      /* ... fall through ...  */
12668
12669    default:
12670      {
12671	tree type = TREE_TYPE (t);
12672	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12673	    && truth_value_p (TREE_CODE (t)))
	  /* Truth values evaluate to 0 or 1, which is nonnegative unless we
	     have a signed:1 type (where the values are -1 and 0).  */
12676	  return true;
12677      }
12678    }
12679
  /* We don't know the sign of `t', so be conservative and return false.  */
12681  return 0;
12682}
12683
12684/* Return true if `t' is known to be non-negative.  Handle warnings
12685   about undefined signed overflow.  */
12686
12687int
12688tree_expr_nonnegative_p (tree t)
12689{
12690  int ret;
12691  bool strict_overflow_p;
12692
12693  strict_overflow_p = false;
12694  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12695  if (strict_overflow_p)
12696    fold_overflow_warning (("assuming signed overflow does not occur when "
12697			    "determining that expression is always "
12698			    "non-negative"),
12699			   WARN_STRICT_OVERFLOW_MISC);
12700  return ret;
12701}
12702
/* Return true when T is known to be nonzero.  For floating point we
   further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.
12706
12707   If the return value is based on the assumption that signed overflow
12708   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12709   change *STRICT_OVERFLOW_P.  */
12710
12711bool
12712tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12713{
12714  tree type = TREE_TYPE (t);
12715  bool sub_strict_overflow_p;
12716
12717  /* Doing something useful for floating point would need more work.  */
12718  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12719    return false;
12720
12721  switch (TREE_CODE (t))
12722    {
12723    case SSA_NAME:
12724      /* Query VRP to see if it has recorded any information about
12725	 the range of this object.  */
12726      return ssa_name_nonzero_p (t);
12727
12728    case ABS_EXPR:
12729      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12730					strict_overflow_p);
12731
12732    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  That does not work
	 correctly if TREE_CONSTANT_OVERFLOW (t) is set.  */
12735      return (TREE_INT_CST_LOW (t) != 0
12736	      || TREE_INT_CST_HIGH (t) != 0);
12737
12738    case PLUS_EXPR:
12739      if (TYPE_OVERFLOW_UNDEFINED (type))
12740	{
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
12743	  sub_strict_overflow_p = false;
12744	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12745					      &sub_strict_overflow_p)
12746	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12747						 &sub_strict_overflow_p))
12748	    return false;
	  /* One of the operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a two's-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
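	  /* Worked example: with 32-bit operands, 0 <= a, b <= 0x7fffffff
	     implies a + b <= 0xfffffffe, which cannot wrap around to zero;
	     so as long as one operand is nonzero, the sum is too.  */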
12753	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12754					     strict_overflow_p)
12755	          || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12756						strict_overflow_p));
12757	}
12758      break;
12759
12760    case MULT_EXPR:
12761      if (TYPE_OVERFLOW_UNDEFINED (type))
12762	{
12763	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12764					 strict_overflow_p)
12765	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12766					    strict_overflow_p))
12767	    {
12768	      *strict_overflow_p = true;
12769	      return true;
12770	    }
12771	}
12772      break;
12773
12774    case NOP_EXPR:
12775      {
12776	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12777	tree outer_type = TREE_TYPE (t);
12778
12779	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12780		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12781					      strict_overflow_p));
12782      }
12783      break;
12784
    case ADDR_EXPR:
12786      {
12787	tree base = get_base_address (TREE_OPERAND (t, 0));
12788
12789	if (!base)
12790	  return false;
12791
12792	/* Weak declarations may link to NULL.  */
12793	if (VAR_OR_FUNCTION_DECL_P (base))
12794	  return !DECL_WEAK (base);
12795
12796	/* Constants are never weak.  */
12797	if (CONSTANT_CLASS_P (base))
12798	  return true;
12799
12800	return false;
12801      }
12802
12803    case COND_EXPR:
12804      sub_strict_overflow_p = false;
12805      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12806				     &sub_strict_overflow_p)
12807	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12808					&sub_strict_overflow_p))
12809	{
12810	  if (sub_strict_overflow_p)
12811	    *strict_overflow_p = true;
12812	  return true;
12813	}
12814      break;
12815
    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  /* MIN is always one of its operands, so when both operands
	     are nonzero, MIN must be nonzero too.  */
	  return true;
	}
      break;
12827
12828    case MAX_EXPR:
12829      sub_strict_overflow_p = false;
12830      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12831				     &sub_strict_overflow_p))
12832	{
12833	  if (sub_strict_overflow_p)
12834	    *strict_overflow_p = true;
12835
12836	  /* When both operands are nonzero, then MAX must be too.  */
12837	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12838					 strict_overflow_p))
12839	    return true;
12840
12841	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						strict_overflow_p);
12844	}
12845      /* MAX where operand 1 is positive is positive.  */
12846      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12847					  &sub_strict_overflow_p)
12848	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12849						 &sub_strict_overflow_p))
12850	{
12851	  if (sub_strict_overflow_p)
12852	    *strict_overflow_p = true;
12853	  return true;
12854	}
12855      break;
12856
12857    case COMPOUND_EXPR:
12858    case MODIFY_EXPR:
12859    case BIND_EXPR:
12860      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12861					strict_overflow_p);
12862
12863    case SAVE_EXPR:
12864    case NON_LVALUE_EXPR:
12865      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12866					strict_overflow_p);
12867
12868    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p));
12873
12874    case CALL_EXPR:
12875      return alloca_call_p (t);
12876
12877    default:
12878      break;
12879    }
12880  return false;
12881}
12882
/* Return true when T is known to be nonzero.  Handle warnings about
   undefined signed overflow.  */
12885
12886bool
12887tree_expr_nonzero_p (tree t)
12888{
12889  bool ret, strict_overflow_p;
12890
12891  strict_overflow_p = false;
12892  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12893  if (strict_overflow_p)
12894    fold_overflow_warning (("assuming signed overflow does not occur when "
12895			    "determining that expression is always "
12896			    "non-zero"),
12897			   WARN_STRICT_OVERFLOW_MISC);
12898  return ret;
12899}
12900
12901/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12902   attempt to fold the expression to a constant without modifying TYPE,
12903   OP0 or OP1.
12904
   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */
12908
12909tree
12910fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12911{
12912  tree tem = fold_binary (code, type, op0, op1);
12913  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12914}
12915
12916/* Given the components of a unary expression CODE, TYPE and OP0,
12917   attempt to fold the expression to a constant without modifying
12918   TYPE or OP0.
12919
   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */
12923
12924tree
12925fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12926{
12927  tree tem = fold_unary (code, type, op0);
12928  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12929}
12930
12931/* If EXP represents referencing an element in a constant string
12932   (either via pointer arithmetic or array indexing), return the
12933   tree representing the value accessed, otherwise return NULL.  */
12934
12935tree
12936fold_read_from_constant_string (tree exp)
12937{
12938  if ((TREE_CODE (exp) == INDIRECT_REF
12939       || TREE_CODE (exp) == ARRAY_REF)
12940      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12941    {
12942      tree exp1 = TREE_OPERAND (exp, 0);
12943      tree index;
12944      tree string;
12945
12946      if (TREE_CODE (exp) == INDIRECT_REF)
12947	string = string_constant (exp1, &index);
12948      else
12949	{
12950	  tree low_bound = array_ref_low_bound (exp);
12951	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12952
	  /* Optimize the special case of a zero lower bound.

	     We convert the lower bound to sizetype to avoid problems
	     with constant folding.  E.g. suppose the lower bound is 1
	     and its mode is QI.  Without the conversion,
	     (ARRAY + (INDEX - (unsigned char) 1))
	     becomes ((ARRAY + (-(unsigned char) 1)) + INDEX),
	     which becomes (ARRAY + 255 + INDEX).  Oops!  */
12960	  if (! integer_zerop (low_bound))
12961	    index = size_diffop (index, fold_convert (sizetype, low_bound));
12962
12963	  string = exp1;
12964	}
12965
12966      if (string
12967	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12968	  && TREE_CODE (string) == STRING_CST
12969	  && TREE_CODE (index) == INTEGER_CST
12970	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12971	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12972	      == MODE_INT)
12973	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12974	return fold_convert (TREE_TYPE (exp),
12975			     build_int_cst (NULL_TREE,
12976					    (TREE_STRING_POINTER (string)
12977					     [TREE_INT_CST_LOW (index)])));
12978    }
12979  return NULL;
12980}
12981
12982/* Return the tree for neg (ARG0) when ARG0 is known to be either
12983   an integer constant or real constant.
12984
12985   TYPE is the type of the result.  */
12986
12987static tree
12988fold_negate_const (tree arg0, tree type)
12989{
12990  tree t = NULL_TREE;
12991
12992  switch (TREE_CODE (arg0))
12993    {
12994    case INTEGER_CST:
12995      {
12996	unsigned HOST_WIDE_INT low;
12997	HOST_WIDE_INT high;
12998	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12999				   TREE_INT_CST_HIGH (arg0),
13000				   &low, &high);
13001	t = build_int_cst_wide (type, low, high);
13002	t = force_fit_type (t, 1,
13003			    (overflow | TREE_OVERFLOW (arg0))
13004			    && !TYPE_UNSIGNED (type),
13005			    TREE_CONSTANT_OVERFLOW (arg0));
13006	break;
13007      }
13008
13009    case REAL_CST:
13010      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13011      break;
13012
13013    default:
13014      gcc_unreachable ();
13015    }
13016
13017  return t;
13018}
13019
13020/* Return the tree for abs (ARG0) when ARG0 is known to be either
13021   an integer constant or real constant.
13022
13023   TYPE is the type of the result.  */
13024
13025tree
13026fold_abs_const (tree arg0, tree type)
13027{
13028  tree t = NULL_TREE;
13029
13030  switch (TREE_CODE (arg0))
13031    {
13032    case INTEGER_CST:
13033      /* If the value is unsigned, then the absolute value is
13034	 the same as the ordinary value.  */
13035      if (TYPE_UNSIGNED (type))
13036	t = arg0;
13037      /* Similarly, if the value is non-negative.  */
13038      else if (INT_CST_LT (integer_minus_one_node, arg0))
13039	t = arg0;
13040      /* If the value is negative, then the absolute value is
13041	 its negation.  */
13042      else
13043	{
13044	  unsigned HOST_WIDE_INT low;
13045	  HOST_WIDE_INT high;
13046	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13047				     TREE_INT_CST_HIGH (arg0),
13048				     &low, &high);
13049	  t = build_int_cst_wide (type, low, high);
13050	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13051			      TREE_CONSTANT_OVERFLOW (arg0));
13052	}
13053      break;
13054
13055    case REAL_CST:
13056      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13057	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13058      else
	t = arg0;
13060      break;
13061
13062    default:
13063      gcc_unreachable ();
13064    }
13065
13066  return t;
13067}
13068
13069/* Return the tree for not (ARG0) when ARG0 is known to be an integer
13070   constant.  TYPE is the type of the result.  */
13071
13072static tree
13073fold_not_const (tree arg0, tree type)
13074{
13075  tree t = NULL_TREE;
13076
13077  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13078
13079  t = build_int_cst_wide (type,
13080			  ~ TREE_INT_CST_LOW (arg0),
13081			  ~ TREE_INT_CST_HIGH (arg0));
13082  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13083		      TREE_CONSTANT_OVERFLOW (arg0));
13084
13085  return t;
13086}
13087
13088/* Given CODE, a relational operator, the target type, TYPE and two
13089   constant operands OP0 and OP1, return the result of the
13090   relational operation.  If the result is not a compile time
13091   constant, then return NULL_TREE.  */
13092
13093static tree
13094fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13095{
13096  int result, invert;
13097
13098  /* From here on, the only cases we handle are when the result is
13099     known to be a constant.  */
13100
13101  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13102    {
13103      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13104      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13105
13106      /* Handle the cases where either operand is a NaN.  */
13107      if (real_isnan (c0) || real_isnan (c1))
13108	{
13109	  switch (code)
13110	    {
13111	    case EQ_EXPR:
13112	    case ORDERED_EXPR:
13113	      result = 0;
13114	      break;
13115
13116	    case NE_EXPR:
13117	    case UNORDERED_EXPR:
13118	    case UNLT_EXPR:
13119	    case UNLE_EXPR:
13120	    case UNGT_EXPR:
13121	    case UNGE_EXPR:
13122	    case UNEQ_EXPR:
13123              result = 1;
13124	      break;
13125
13126	    case LT_EXPR:
13127	    case LE_EXPR:
13128	    case GT_EXPR:
13129	    case GE_EXPR:
13130	    case LTGT_EXPR:
13131	      if (flag_trapping_math)
13132		return NULL_TREE;
13133	      result = 0;
13134	      break;
13135
13136	    default:
13137	      gcc_unreachable ();
13138	    }
13139
13140	  return constant_boolean_node (result, type);
13141	}
13142
13143      return constant_boolean_node (real_compare (code, c0, c1), type);
13144    }
13145
13146  /* Handle equality/inequality of complex constants.  */
13147  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13148    {
13149      tree rcond = fold_relational_const (code, type,
13150					  TREE_REALPART (op0),
13151					  TREE_REALPART (op1));
13152      tree icond = fold_relational_const (code, type,
13153					  TREE_IMAGPART (op0),
13154					  TREE_IMAGPART (op1));
13155      if (code == EQ_EXPR)
13156	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13157      else if (code == NE_EXPR)
13158	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13159      else
13160	return NULL_TREE;
13161    }
13162
13163  /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13164
13165     To compute GT, swap the arguments and do LT.
13166     To compute GE, do LT and invert the result.
13167     To compute LE, swap the arguments, do LT and invert the result.
13168     To compute NE, do EQ and invert the result.
13169
13170     Therefore, the code below must handle only EQ and LT.  */
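  /* For example, the constant comparison 3 >= 2 is computed below as
     !(3 < 2): GE maps to LT plus inversion, LT yields 0 here, and the
     final invert step flips the result to 1.  */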
13171
13172  if (code == LE_EXPR || code == GT_EXPR)
13173    {
13174      tree tem = op0;
13175      op0 = op1;
13176      op1 = tem;
13177      code = swap_tree_comparison (code);
13178    }
13179
  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
13182
13183  invert = 0;
13184  if (code == NE_EXPR || code == GE_EXPR)
13185    {
13186      invert = 1;
13187      code = invert_tree_comparison (code, false);
13188    }
13189
  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
13192  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13193    {
13194      if (code == EQ_EXPR)
13195	result = tree_int_cst_equal (op0, op1);
13196      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13197	result = INT_CST_LT_UNSIGNED (op0, op1);
13198      else
13199	result = INT_CST_LT (op0, op1);
13200    }
13201  else
13202    return NULL_TREE;
13203
13204  if (invert)
13205    result ^= 1;
13206  return constant_boolean_node (result, type);
13207}
13208
/* Build an expression for a cleanup point containing EXPR, with type TYPE.
   Don't build a cleanup point expression for an EXPR which doesn't have
   side effects.  */
13212
13213tree
13214fold_build_cleanup_point_expr (tree type, tree expr)
13215{
13216  /* If the expression does not have side effects then we don't have to wrap
13217     it with a cleanup point expression.  */
13218  if (!TREE_SIDE_EFFECTS (expr))
13219    return expr;
13220
  /* If the expression is a return, check whether the expression inside
     the return has side effects, or, if it is a modify expression,
     whether its right-hand side has side effects.  If neither has side
     effects, we don't need to wrap the expression in a cleanup point
     expression.  Note we don't check the left-hand side of the modify
     because it should always be the return decl.  */
13226  if (TREE_CODE (expr) == RETURN_EXPR)
13227    {
13228      tree op = TREE_OPERAND (expr, 0);
13229      if (!op || !TREE_SIDE_EFFECTS (op))
13230        return expr;
13231      op = TREE_OPERAND (op, 1);
13232      if (!TREE_SIDE_EFFECTS (op))
13233        return expr;
13234    }
13235
13236  return build1 (CLEANUP_POINT_EXPR, type, expr);
13237}
13238
13239/* Build an expression for the address of T.  Folds away INDIRECT_REF to
13240   avoid confusing the gimplify process.  */
13241
13242tree
13243build_fold_addr_expr_with_type (tree t, tree ptrtype)
13244{
13245  /* The size of the object is not relevant when talking about its address.  */
13246  if (TREE_CODE (t) == WITH_SIZE_EXPR)
13247    t = TREE_OPERAND (t, 0);
13248
  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
13250  if (TREE_CODE (t) == INDIRECT_REF
13251      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13252    {
13253      t = TREE_OPERAND (t, 0);
13254      if (TREE_TYPE (t) != ptrtype)
13255	t = build1 (NOP_EXPR, ptrtype, t);
13256    }
13257  else
13258    {
13259      tree base = t;
13260
13261      while (handled_component_p (base))
13262	base = TREE_OPERAND (base, 0);
13263      if (DECL_P (base))
13264	TREE_ADDRESSABLE (base) = 1;
13265
13266      t = build1 (ADDR_EXPR, ptrtype, t);
13267    }
13268
13269  return t;
13270}
13271
13272tree
13273build_fold_addr_expr (tree t)
13274{
13275  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13276}
13277
13278/* Given a pointer value OP0 and a type TYPE, return a simplified version
13279   of an indirection through OP0, or NULL_TREE if no simplification is
13280   possible.  */
13281
13282tree
13283fold_indirect_ref_1 (tree type, tree op0)
13284{
13285  tree sub = op0;
13286  tree subtype;
13287
13288  STRIP_NOPS (sub);
13289  subtype = TREE_TYPE (sub);
13290  if (!POINTER_TYPE_P (subtype))
13291    return NULL_TREE;
13292
13293  if (TREE_CODE (sub) == ADDR_EXPR)
13294    {
13295      tree op = TREE_OPERAND (sub, 0);
13296      tree optype = TREE_TYPE (op);
13297      /* *&CONST_DECL -> to the value of the const decl.  */
13298      if (TREE_CODE (op) == CONST_DECL)
13299	return DECL_INITIAL (op);
13300      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
13301      if (type == optype)
13302	{
13303	  tree fop = fold_read_from_constant_string (op);
13304	  if (fop)
13305	    return fop;
13306	  else
13307	    return op;
13308	}
13309      /* *(foo *)&fooarray => fooarray[0] */
13310      else if (TREE_CODE (optype) == ARRAY_TYPE
13311	       && type == TREE_TYPE (optype))
13312	{
13313	  tree type_domain = TYPE_DOMAIN (optype);
13314	  tree min_val = size_zero_node;
13315	  if (type_domain && TYPE_MIN_VALUE (type_domain))
13316	    min_val = TYPE_MIN_VALUE (type_domain);
13317	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13318	}
13319      /* *(foo *)&complexfoo => __real__ complexfoo */
13320      else if (TREE_CODE (optype) == COMPLEX_TYPE
13321	       && type == TREE_TYPE (optype))
13322	return fold_build1 (REALPART_EXPR, type, op);
13323    }
13324
13325  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13326  if (TREE_CODE (sub) == PLUS_EXPR
13327      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13328    {
13329      tree op00 = TREE_OPERAND (sub, 0);
13330      tree op01 = TREE_OPERAND (sub, 1);
13331      tree op00type;
13332
13333      STRIP_NOPS (op00);
13334      op00type = TREE_TYPE (op00);
13335      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13337	  && type == TREE_TYPE (TREE_TYPE (op00type)))
13338	{
13339	  tree size = TYPE_SIZE_UNIT (type);
13340	  if (tree_int_cst_equal (size, op01))
13341	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13342	}
13343    }
13344
13345  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13346  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13347      && type == TREE_TYPE (TREE_TYPE (subtype)))
13348    {
13349      tree type_domain;
13350      tree min_val = size_zero_node;
13351      sub = build_fold_indirect_ref (sub);
13352      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13353      if (type_domain && TYPE_MIN_VALUE (type_domain))
13354	min_val = TYPE_MIN_VALUE (type_domain);
13355      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13356    }
13357
13358  return NULL_TREE;
13359}
13360
13361/* Builds an expression for an indirection through T, simplifying some
13362   cases.  */
13363
13364tree
13365build_fold_indirect_ref (tree t)
13366{
13367  tree type = TREE_TYPE (TREE_TYPE (t));
13368  tree sub = fold_indirect_ref_1 (type, t);
13369
13370  if (sub)
13371    return sub;
13372  else
13373    return build1 (INDIRECT_REF, type, t);
13374}
13375
13376/* Given an INDIRECT_REF T, return either T or a simplified version.  */
13377
13378tree
13379fold_indirect_ref (tree t)
13380{
13381  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13382
13383  if (sub)
13384    return sub;
13385  else
13386    return t;
13387}
13388
13389/* Strip non-trapping, non-side-effecting tree nodes from an expression
13390   whose result is ignored.  The type of the returned tree need not be
13391   the same as the original expression.  */
13392
13393tree
13394fold_ignored_result (tree t)
13395{
13396  if (!TREE_SIDE_EFFECTS (t))
13397    return integer_zero_node;
13398
13399  for (;;)
13400    switch (TREE_CODE_CLASS (TREE_CODE (t)))
13401      {
13402      case tcc_unary:
13403	t = TREE_OPERAND (t, 0);
13404	break;
13405
13406      case tcc_binary:
13407      case tcc_comparison:
13408	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13409	  t = TREE_OPERAND (t, 0);
13410	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13411	  t = TREE_OPERAND (t, 1);
13412	else
13413	  return t;
13414	break;
13415
13416      case tcc_expression:
13417	switch (TREE_CODE (t))
13418	  {
13419	  case COMPOUND_EXPR:
13420	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13421	      return t;
13422	    t = TREE_OPERAND (t, 0);
13423	    break;
13424
13425	  case COND_EXPR:
13426	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13427		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13428	      return t;
13429	    t = TREE_OPERAND (t, 0);
13430	    break;
13431
13432	  default:
13433	    return t;
13434	  }
13435	break;
13436
13437      default:
13438	return t;
13439      }
13440}
13441
13442/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13443   This can only be applied to objects of a sizetype.  */
13444
13445tree
13446round_up (tree value, int divisor)
13447{
13448  tree div = NULL_TREE;
13449
13450  gcc_assert (divisor > 0);
13451  if (divisor == 1)
13452    return value;
13453
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case the check is more expensive than
     just doing the rounding.  */
13458  if (TREE_CODE (value) != INTEGER_CST)
13459    {
13460      div = build_int_cst (TREE_TYPE (value), divisor);
13461
13462      if (multiple_of_p (TREE_TYPE (value), value, div))
13463	return value;
13464    }
13465
13466  /* If divisor is a power of two, simplify this to bit manipulation.  */
13467  if (divisor == (divisor & -divisor))
13468    {
13469      tree t;
13470
13471      t = build_int_cst (TREE_TYPE (value), divisor - 1);
13472      value = size_binop (PLUS_EXPR, value, t);
13473      t = build_int_cst (TREE_TYPE (value), -divisor);
13474      value = size_binop (BIT_AND_EXPR, value, t);
13475    }
13476  else
13477    {
13478      if (!div)
13479	div = build_int_cst (TREE_TYPE (value), divisor);
13480      value = size_binop (CEIL_DIV_EXPR, value, div);
13481      value = size_binop (MULT_EXPR, value, div);
13482    }
13483
13484  return value;
13485}
13486
13487/* Likewise, but round down.  */
13488
13489tree
13490round_down (tree value, int divisor)
13491{
13492  tree div = NULL_TREE;
13493
13494  gcc_assert (divisor > 0);
13495  if (divisor == 1)
13496    return value;
13497
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case the check is more expensive than
     just doing the rounding.  */
13502  if (TREE_CODE (value) != INTEGER_CST)
13503    {
13504      div = build_int_cst (TREE_TYPE (value), divisor);
13505
13506      if (multiple_of_p (TREE_TYPE (value), value, div))
13507	return value;
13508    }
13509
13510  /* If divisor is a power of two, simplify this to bit manipulation.  */
13511  if (divisor == (divisor & -divisor))
13512    {
13513      tree t;
13514
13515      t = build_int_cst (TREE_TYPE (value), -divisor);
13516      value = size_binop (BIT_AND_EXPR, value, t);
13517    }
13518  else
13519    {
13520      if (!div)
13521	div = build_int_cst (TREE_TYPE (value), divisor);
13522      value = size_binop (FLOOR_DIV_EXPR, value, div);
13523      value = size_binop (MULT_EXPR, value, div);
13524    }
13525
13526  return value;
13527}
13528
/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */
13532
13533static tree
13534split_address_to_core_and_offset (tree exp,
13535				  HOST_WIDE_INT *pbitpos, tree *poffset)
13536{
13537  tree core;
13538  enum machine_mode mode;
13539  int unsignedp, volatilep;
13540  HOST_WIDE_INT bitsize;
13541
13542  if (TREE_CODE (exp) == ADDR_EXPR)
13543    {
13544      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13545				  poffset, &mode, &unsignedp, &volatilep,
13546				  false);
13547      core = build_fold_addr_expr (core);
13548    }
13549  else
13550    {
13551      core = exp;
13552      *pbitpos = 0;
13553      *poffset = NULL_TREE;
13554    }
13555
13556  return core;
13557}
13558
/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
13561
13562bool
13563ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13564{
13565  tree core1, core2;
13566  HOST_WIDE_INT bitpos1, bitpos2;
13567  tree toffset1, toffset2, tdiff, type;
13568
13569  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13570  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13571
13572  if (bitpos1 % BITS_PER_UNIT != 0
13573      || bitpos2 % BITS_PER_UNIT != 0
13574      || !operand_equal_p (core1, core2, 0))
13575    return false;
13576
13577  if (toffset1 && toffset2)
13578    {
13579      type = TREE_TYPE (toffset1);
13580      if (type != TREE_TYPE (toffset2))
13581	toffset2 = fold_convert (type, toffset2);
13582
13583      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13584      if (!cst_and_fits_in_hwi (tdiff))
13585	return false;
13586
13587      *diff = int_cst_value (tdiff);
13588    }
13589  else if (toffset1 || toffset2)
13590    {
13591      /* If only one of the offsets is non-constant, the difference cannot
13592	 be a constant.  */
13593      return false;
13594    }
13595  else
13596    *diff = 0;
13597
13598  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13599  return true;
13600}
13601
13602/* Simplify the floating point expression EXP when the sign of the
13603   result is not significant.  Return NULL_TREE if no simplification
13604   is possible.  */
13605
13606tree
13607fold_strip_sign_ops (tree exp)
13608{
13609  tree arg0, arg1;
13610
13611  switch (TREE_CODE (exp))
13612    {
13613    case ABS_EXPR:
13614    case NEGATE_EXPR:
13615      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13616      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13617
13618    case MULT_EXPR:
13619    case RDIV_EXPR:
13620      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13621	return NULL_TREE;
13622      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13623      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13624      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13625	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13626			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
13627			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
13628      break;
13629
13630    default:
13631      break;
13632    }
13633  return NULL_TREE;
13634}
13635
13636