1/* Fold a constant sub-tree into a single node for C-compiler 2 Copyright (C) 1987-2015 Free Software Foundation, Inc. 3 4This file is part of GCC. 5 6GCC is free software; you can redistribute it and/or modify it under 7the terms of the GNU General Public License as published by the Free 8Software Foundation; either version 3, or (at your option) any later 9version. 10 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY 12WARRANTY; without even the implied warranty of MERCHANTABILITY or 13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 14for more details. 15 16You should have received a copy of the GNU General Public License 17along with GCC; see the file COPYING3. If not see 18<http://www.gnu.org/licenses/>. */ 19 20/*@@ This file should be rewritten to use an arbitrary precision 21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst". 22 @@ Perhaps the routines could also be used for bc/dc, and made a lib. 23 @@ The routines that translate from the ap rep should 24 @@ warn if precision et. al. is lost. 25 @@ This would also make life easier when this technology is used 26 @@ for cross-compilers. */ 27 28/* The entry points in this file are fold, size_int_wide and size_binop. 29 30 fold takes a tree as argument and returns a simplified tree. 31 32 size_binop takes a tree code for an arithmetic operation 33 and two operands that are trees, and produces a tree for the 34 result, assuming the type comes from `sizetype'. 35 36 size_int takes an integer value, and creates a tree constant 37 with type from `sizetype'. 38 39 Note: Since the folders get called on non-gimple code as well as 40 gimple code, we need to handle GIMPLE tuples as well as their 41 corresponding tree equivalents. */ 42 43#include "config.h" 44#include "system.h" 45#include "coretypes.h" 46#include "tm.h" 47#include "flags.h" 48#include "hash-set.h" 49#include "machmode.h" 50#include "vec.h" 51#include "double-int.h" 52#include "input.h" 53#include "alias.h" 54#include "symtab.h" 55#include "wide-int.h" 56#include "inchash.h" 57#include "tree.h" 58#include "fold-const.h" 59#include "stor-layout.h" 60#include "calls.h" 61#include "tree-iterator.h" 62#include "realmpfr.h" 63#include "rtl.h" 64#include "hashtab.h" 65#include "hard-reg-set.h" 66#include "function.h" 67#include "statistics.h" 68#include "real.h" 69#include "fixed-value.h" 70#include "insn-config.h" 71#include "expmed.h" 72#include "dojump.h" 73#include "explow.h" 74#include "emit-rtl.h" 75#include "varasm.h" 76#include "stmt.h" 77#include "expr.h" 78#include "tm_p.h" 79#include "target.h" 80#include "diagnostic-core.h" 81#include "intl.h" 82#include "langhooks.h" 83#include "md5.h" 84#include "predict.h" 85#include "basic-block.h" 86#include "tree-ssa-alias.h" 87#include "internal-fn.h" 88#include "tree-eh.h" 89#include "gimple-expr.h" 90#include "is-a.h" 91#include "gimple.h" 92#include "gimplify.h" 93#include "tree-dfa.h" 94#include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */ 95#include "builtins.h" 96#include "hash-map.h" 97#include "plugin-api.h" 98#include "ipa-ref.h" 99#include "cgraph.h" 100#include "generic-match.h" 101#include "optabs.h" 102 103/* Nonzero if we are folding constants inside an initializer; zero 104 otherwise. */ 105int folding_initializer = 0; 106 107/* The following constants represent a bit based encoding of GCC's 108 comparison operators. This encoding simplifies transformations 109 on relational comparison operators, such as AND and OR. 
*/ 110enum comparison_code { 111 COMPCODE_FALSE = 0, 112 COMPCODE_LT = 1, 113 COMPCODE_EQ = 2, 114 COMPCODE_LE = 3, 115 COMPCODE_GT = 4, 116 COMPCODE_LTGT = 5, 117 COMPCODE_GE = 6, 118 COMPCODE_ORD = 7, 119 COMPCODE_UNORD = 8, 120 COMPCODE_UNLT = 9, 121 COMPCODE_UNEQ = 10, 122 COMPCODE_UNLE = 11, 123 COMPCODE_UNGT = 12, 124 COMPCODE_NE = 13, 125 COMPCODE_UNGE = 14, 126 COMPCODE_TRUE = 15 127}; 128 129static bool negate_mathfn_p (enum built_in_function); 130static bool negate_expr_p (tree); 131static tree negate_expr (tree); 132static tree split_tree (location_t, tree, tree, enum tree_code, 133 tree *, tree *, tree *, int); 134static tree associate_trees (location_t, tree, tree, enum tree_code, tree); 135static enum comparison_code comparison_to_compcode (enum tree_code); 136static enum tree_code compcode_to_comparison (enum comparison_code); 137static int operand_equal_for_comparison_p (tree, tree, tree); 138static int twoval_comparison_p (tree, tree *, tree *, int *); 139static tree eval_subst (location_t, tree, tree, tree, tree, tree); 140static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree); 141static tree make_bit_field_ref (location_t, tree, tree, 142 HOST_WIDE_INT, HOST_WIDE_INT, int); 143static tree optimize_bit_field_compare (location_t, enum tree_code, 144 tree, tree, tree); 145static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *, 146 HOST_WIDE_INT *, 147 machine_mode *, int *, int *, 148 tree *, tree *); 149static int simple_operand_p (const_tree); 150static bool simple_operand_p_2 (tree); 151static tree range_binop (enum tree_code, tree, tree, int, tree, int); 152static tree range_predecessor (tree); 153static tree range_successor (tree); 154static tree fold_range_test (location_t, enum tree_code, tree, tree, tree); 155static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree); 156static tree unextend (tree, int, int, tree); 157static tree optimize_minmax_comparison (location_t, enum tree_code, 158 tree, tree, tree); 159static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *); 160static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *); 161static tree fold_binary_op_with_conditional_arg (location_t, 162 enum tree_code, tree, 163 tree, tree, 164 tree, tree, int); 165static tree fold_mathfn_compare (location_t, 166 enum built_in_function, enum tree_code, 167 tree, tree, tree); 168static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree); 169static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree); 170static bool reorder_operands_p (const_tree, const_tree); 171static tree fold_negate_const (tree, tree); 172static tree fold_not_const (const_tree, tree); 173static tree fold_relational_const (enum tree_code, tree, tree, tree); 174static tree fold_convert_const (enum tree_code, tree, tree); 175static tree fold_view_convert_expr (tree, tree); 176static bool vec_cst_ctor_to_array (tree, tree *); 177 178 179/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION. 180 Otherwise, return LOC. */ 181 182static location_t 183expr_location_or (tree t, location_t loc) 184{ 185 location_t tloc = EXPR_LOCATION (t); 186 return tloc == UNKNOWN_LOCATION ? loc : tloc; 187} 188 189/* Similar to protected_set_expr_location, but never modify x in place, 190 if location can and needs to be set, unshare it. 
*/ 191 192static inline tree 193protected_set_expr_location_unshare (tree x, location_t loc) 194{ 195 if (CAN_HAVE_LOCATION_P (x) 196 && EXPR_LOCATION (x) != loc 197 && !(TREE_CODE (x) == SAVE_EXPR 198 || TREE_CODE (x) == TARGET_EXPR 199 || TREE_CODE (x) == BIND_EXPR)) 200 { 201 x = copy_node (x); 202 SET_EXPR_LOCATION (x, loc); 203 } 204 return x; 205} 206 207/* If ARG2 divides ARG1 with zero remainder, carries out the exact 208 division and returns the quotient. Otherwise returns 209 NULL_TREE. */ 210 211tree 212div_if_zero_remainder (const_tree arg1, const_tree arg2) 213{ 214 widest_int quo; 215 216 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2), 217 SIGNED, &quo)) 218 return wide_int_to_tree (TREE_TYPE (arg1), quo); 219 220 return NULL_TREE; 221} 222 223/* This is nonzero if we should defer warnings about undefined 224 overflow. This facility exists because these warnings are a 225 special case. The code to estimate loop iterations does not want 226 to issue any warnings, since it works with expressions which do not 227 occur in user code. Various bits of cleanup code call fold(), but 228 only use the result if it has certain characteristics (e.g., is a 229 constant); that code only wants to issue a warning if the result is 230 used. */ 231 232static int fold_deferring_overflow_warnings; 233 234/* If a warning about undefined overflow is deferred, this is the 235 warning. Note that this may cause us to turn two warnings into 236 one, but that is fine since it is sufficient to only give one 237 warning per expression. */ 238 239static const char* fold_deferred_overflow_warning; 240 241/* If a warning about undefined overflow is deferred, this is the 242 level at which the warning should be emitted. */ 243 244static enum warn_strict_overflow_code fold_deferred_overflow_code; 245 246/* Start deferring overflow warnings. We could use a stack here to 247 permit nested calls, but at present it is not necessary. */ 248 249void 250fold_defer_overflow_warnings (void) 251{ 252 ++fold_deferring_overflow_warnings; 253} 254 255/* Stop deferring overflow warnings. If there is a pending warning, 256 and ISSUE is true, then issue the warning if appropriate. STMT is 257 the statement with which the warning should be associated (used for 258 location information); STMT may be NULL. CODE is the level of the 259 warning--a warn_strict_overflow_code value. This function will use 260 the smaller of CODE and the deferred code when deciding whether to 261 issue the warning. CODE may be zero to mean to always use the 262 deferred code. */ 263 264void 265fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code) 266{ 267 const char *warnmsg; 268 location_t locus; 269 270 gcc_assert (fold_deferring_overflow_warnings > 0); 271 --fold_deferring_overflow_warnings; 272 if (fold_deferring_overflow_warnings > 0) 273 { 274 if (fold_deferred_overflow_warning != NULL 275 && code != 0 276 && code < (int) fold_deferred_overflow_code) 277 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code; 278 return; 279 } 280 281 warnmsg = fold_deferred_overflow_warning; 282 fold_deferred_overflow_warning = NULL; 283 284 if (!issue || warnmsg == NULL) 285 return; 286 287 if (gimple_no_warning_p (stmt)) 288 return; 289 290 /* Use the smallest code level when deciding to issue the 291 warning. 
*/ 292 if (code == 0 || code > (int) fold_deferred_overflow_code) 293 code = fold_deferred_overflow_code; 294 295 if (!issue_strict_overflow_warning (code)) 296 return; 297 298 if (stmt == NULL) 299 locus = input_location; 300 else 301 locus = gimple_location (stmt); 302 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg); 303} 304 305/* Stop deferring overflow warnings, ignoring any deferred 306 warnings. */ 307 308void 309fold_undefer_and_ignore_overflow_warnings (void) 310{ 311 fold_undefer_overflow_warnings (false, NULL, 0); 312} 313 314/* Whether we are deferring overflow warnings. */ 315 316bool 317fold_deferring_overflow_warnings_p (void) 318{ 319 return fold_deferring_overflow_warnings > 0; 320} 321 322/* This is called when we fold something based on the fact that signed 323 overflow is undefined. */ 324 325static void 326fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc) 327{ 328 if (fold_deferring_overflow_warnings > 0) 329 { 330 if (fold_deferred_overflow_warning == NULL 331 || wc < fold_deferred_overflow_code) 332 { 333 fold_deferred_overflow_warning = gmsgid; 334 fold_deferred_overflow_code = wc; 335 } 336 } 337 else if (issue_strict_overflow_warning (wc)) 338 warning (OPT_Wstrict_overflow, gmsgid); 339} 340 341/* Return true if the built-in mathematical function specified by CODE 342 is odd, i.e. -f(x) == f(-x). */ 343 344static bool 345negate_mathfn_p (enum built_in_function code) 346{ 347 switch (code) 348 { 349 CASE_FLT_FN (BUILT_IN_ASIN): 350 CASE_FLT_FN (BUILT_IN_ASINH): 351 CASE_FLT_FN (BUILT_IN_ATAN): 352 CASE_FLT_FN (BUILT_IN_ATANH): 353 CASE_FLT_FN (BUILT_IN_CASIN): 354 CASE_FLT_FN (BUILT_IN_CASINH): 355 CASE_FLT_FN (BUILT_IN_CATAN): 356 CASE_FLT_FN (BUILT_IN_CATANH): 357 CASE_FLT_FN (BUILT_IN_CBRT): 358 CASE_FLT_FN (BUILT_IN_CPROJ): 359 CASE_FLT_FN (BUILT_IN_CSIN): 360 CASE_FLT_FN (BUILT_IN_CSINH): 361 CASE_FLT_FN (BUILT_IN_CTAN): 362 CASE_FLT_FN (BUILT_IN_CTANH): 363 CASE_FLT_FN (BUILT_IN_ERF): 364 CASE_FLT_FN (BUILT_IN_LLROUND): 365 CASE_FLT_FN (BUILT_IN_LROUND): 366 CASE_FLT_FN (BUILT_IN_ROUND): 367 CASE_FLT_FN (BUILT_IN_SIN): 368 CASE_FLT_FN (BUILT_IN_SINH): 369 CASE_FLT_FN (BUILT_IN_TAN): 370 CASE_FLT_FN (BUILT_IN_TANH): 371 CASE_FLT_FN (BUILT_IN_TRUNC): 372 return true; 373 374 CASE_FLT_FN (BUILT_IN_LLRINT): 375 CASE_FLT_FN (BUILT_IN_LRINT): 376 CASE_FLT_FN (BUILT_IN_NEARBYINT): 377 CASE_FLT_FN (BUILT_IN_RINT): 378 return !flag_rounding_math; 379 380 default: 381 break; 382 } 383 return false; 384} 385 386/* Check whether we may negate an integer constant T without causing 387 overflow. */ 388 389bool 390may_negate_without_overflow_p (const_tree t) 391{ 392 tree type; 393 394 gcc_assert (TREE_CODE (t) == INTEGER_CST); 395 396 type = TREE_TYPE (t); 397 if (TYPE_UNSIGNED (type)) 398 return false; 399 400 return !wi::only_sign_bit_p (t); 401} 402 403/* Determine whether an expression T can be cheaply negated using 404 the function negate_expr without introducing undefined overflow. */ 405 406static bool 407negate_expr_p (tree t) 408{ 409 tree type; 410 411 if (t == 0) 412 return false; 413 414 type = TREE_TYPE (t); 415 416 STRIP_SIGN_NOPS (t); 417 switch (TREE_CODE (t)) 418 { 419 case INTEGER_CST: 420 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type)) 421 return true; 422 423 /* Check that -CST will not overflow type. 
*/ 424 return may_negate_without_overflow_p (t); 425 case BIT_NOT_EXPR: 426 return (INTEGRAL_TYPE_P (type) 427 && TYPE_OVERFLOW_WRAPS (type)); 428 429 case FIXED_CST: 430 return true; 431 432 case NEGATE_EXPR: 433 return !TYPE_OVERFLOW_SANITIZED (type); 434 435 case REAL_CST: 436 /* We want to canonicalize to positive real constants. Pretend 437 that only negative ones can be easily negated. */ 438 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); 439 440 case COMPLEX_CST: 441 return negate_expr_p (TREE_REALPART (t)) 442 && negate_expr_p (TREE_IMAGPART (t)); 443 444 case VECTOR_CST: 445 { 446 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type)) 447 return true; 448 449 int count = TYPE_VECTOR_SUBPARTS (type), i; 450 451 for (i = 0; i < count; i++) 452 if (!negate_expr_p (VECTOR_CST_ELT (t, i))) 453 return false; 454 455 return true; 456 } 457 458 case COMPLEX_EXPR: 459 return negate_expr_p (TREE_OPERAND (t, 0)) 460 && negate_expr_p (TREE_OPERAND (t, 1)); 461 462 case CONJ_EXPR: 463 return negate_expr_p (TREE_OPERAND (t, 0)); 464 465 case PLUS_EXPR: 466 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) 467 || HONOR_SIGNED_ZEROS (element_mode (type)) 468 || (INTEGRAL_TYPE_P (type) 469 && ! TYPE_OVERFLOW_WRAPS (type))) 470 return false; 471 /* -(A + B) -> (-B) - A. */ 472 if (negate_expr_p (TREE_OPERAND (t, 1)) 473 && reorder_operands_p (TREE_OPERAND (t, 0), 474 TREE_OPERAND (t, 1))) 475 return true; 476 /* -(A + B) -> (-A) - B. */ 477 return negate_expr_p (TREE_OPERAND (t, 0)); 478 479 case MINUS_EXPR: 480 /* We can't turn -(A-B) into B-A when we honor signed zeros. */ 481 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) 482 && !HONOR_SIGNED_ZEROS (element_mode (type)) 483 && (! INTEGRAL_TYPE_P (type) 484 || TYPE_OVERFLOW_WRAPS (type)) 485 && reorder_operands_p (TREE_OPERAND (t, 0), 486 TREE_OPERAND (t, 1)); 487 488 case MULT_EXPR: 489 if (TYPE_UNSIGNED (type)) 490 break; 491 /* INT_MIN/n * n doesn't overflow while negating one operand it does 492 if n is a power of two. */ 493 if (INTEGRAL_TYPE_P (TREE_TYPE (t)) 494 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t)) 495 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST 496 && ! integer_pow2p (TREE_OPERAND (t, 0))) 497 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST 498 && ! integer_pow2p (TREE_OPERAND (t, 1))))) 499 break; 500 501 /* Fall through. */ 502 503 case RDIV_EXPR: 504 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t)))) 505 return negate_expr_p (TREE_OPERAND (t, 1)) 506 || negate_expr_p (TREE_OPERAND (t, 0)); 507 break; 508 509 case TRUNC_DIV_EXPR: 510 case ROUND_DIV_EXPR: 511 case EXACT_DIV_EXPR: 512 /* In general we can't negate A / B, because if A is INT_MIN and 513 B is 1, we may turn this into INT_MIN / -1 which is undefined 514 and actually traps on some architectures. But if overflow is 515 undefined, we can negate, because - (INT_MIN / 1) is an 516 overflow. */ 517 if (INTEGRAL_TYPE_P (TREE_TYPE (t))) 518 { 519 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))) 520 break; 521 /* If overflow is undefined then we have to be careful because 522 we ask whether it's ok to associate the negate with the 523 division which is not ok for example for 524 -((a - b) / c) where (-(a - b)) / c may invoke undefined 525 overflow because of negating INT_MIN. So do not use 526 negate_expr_p here but open-code the two important cases. 
*/ 527 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR 528 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST 529 && may_negate_without_overflow_p (TREE_OPERAND (t, 0)))) 530 return true; 531 } 532 else if (negate_expr_p (TREE_OPERAND (t, 0))) 533 return true; 534 return negate_expr_p (TREE_OPERAND (t, 1)); 535 536 case NOP_EXPR: 537 /* Negate -((double)float) as (double)(-float). */ 538 if (TREE_CODE (type) == REAL_TYPE) 539 { 540 tree tem = strip_float_extensions (t); 541 if (tem != t) 542 return negate_expr_p (tem); 543 } 544 break; 545 546 case CALL_EXPR: 547 /* Negate -f(x) as f(-x). */ 548 if (negate_mathfn_p (builtin_mathfn_code (t))) 549 return negate_expr_p (CALL_EXPR_ARG (t, 0)); 550 break; 551 552 case RSHIFT_EXPR: 553 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */ 554 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) 555 { 556 tree op1 = TREE_OPERAND (t, 1); 557 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1)) 558 return true; 559 } 560 break; 561 562 default: 563 break; 564 } 565 return false; 566} 567 568/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no 569 simplification is possible. 570 If negate_expr_p would return true for T, NULL_TREE will never be 571 returned. */ 572 573static tree 574fold_negate_expr (location_t loc, tree t) 575{ 576 tree type = TREE_TYPE (t); 577 tree tem; 578 579 switch (TREE_CODE (t)) 580 { 581 /* Convert - (~A) to A + 1. */ 582 case BIT_NOT_EXPR: 583 if (INTEGRAL_TYPE_P (type)) 584 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0), 585 build_one_cst (type)); 586 break; 587 588 case INTEGER_CST: 589 tem = fold_negate_const (t, type); 590 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t) 591 || (ANY_INTEGRAL_TYPE_P (type) 592 && !TYPE_OVERFLOW_TRAPS (type) 593 && TYPE_OVERFLOW_WRAPS (type)) 594 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0) 595 return tem; 596 break; 597 598 case REAL_CST: 599 tem = fold_negate_const (t, type); 600 return tem; 601 602 case FIXED_CST: 603 tem = fold_negate_const (t, type); 604 return tem; 605 606 case COMPLEX_CST: 607 { 608 tree rpart = fold_negate_expr (loc, TREE_REALPART (t)); 609 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t)); 610 if (rpart && ipart) 611 return build_complex (type, rpart, ipart); 612 } 613 break; 614 615 case VECTOR_CST: 616 { 617 int count = TYPE_VECTOR_SUBPARTS (type), i; 618 tree *elts = XALLOCAVEC (tree, count); 619 620 for (i = 0; i < count; i++) 621 { 622 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i)); 623 if (elts[i] == NULL_TREE) 624 return NULL_TREE; 625 } 626 627 return build_vector (type, elts); 628 } 629 630 case COMPLEX_EXPR: 631 if (negate_expr_p (t)) 632 return fold_build2_loc (loc, COMPLEX_EXPR, type, 633 fold_negate_expr (loc, TREE_OPERAND (t, 0)), 634 fold_negate_expr (loc, TREE_OPERAND (t, 1))); 635 break; 636 637 case CONJ_EXPR: 638 if (negate_expr_p (t)) 639 return fold_build1_loc (loc, CONJ_EXPR, type, 640 fold_negate_expr (loc, TREE_OPERAND (t, 0))); 641 break; 642 643 case NEGATE_EXPR: 644 if (!TYPE_OVERFLOW_SANITIZED (type)) 645 return TREE_OPERAND (t, 0); 646 break; 647 648 case PLUS_EXPR: 649 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) 650 && !HONOR_SIGNED_ZEROS (element_mode (type))) 651 { 652 /* -(A + B) -> (-B) - A. 
*/ 653 if (negate_expr_p (TREE_OPERAND (t, 1)) 654 && reorder_operands_p (TREE_OPERAND (t, 0), 655 TREE_OPERAND (t, 1))) 656 { 657 tem = negate_expr (TREE_OPERAND (t, 1)); 658 return fold_build2_loc (loc, MINUS_EXPR, type, 659 tem, TREE_OPERAND (t, 0)); 660 } 661 662 /* -(A + B) -> (-A) - B. */ 663 if (negate_expr_p (TREE_OPERAND (t, 0))) 664 { 665 tem = negate_expr (TREE_OPERAND (t, 0)); 666 return fold_build2_loc (loc, MINUS_EXPR, type, 667 tem, TREE_OPERAND (t, 1)); 668 } 669 } 670 break; 671 672 case MINUS_EXPR: 673 /* - (A - B) -> B - A */ 674 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) 675 && !HONOR_SIGNED_ZEROS (element_mode (type)) 676 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1))) 677 return fold_build2_loc (loc, MINUS_EXPR, type, 678 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0)); 679 break; 680 681 case MULT_EXPR: 682 if (TYPE_UNSIGNED (type)) 683 break; 684 685 /* Fall through. */ 686 687 case RDIV_EXPR: 688 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))) 689 { 690 tem = TREE_OPERAND (t, 1); 691 if (negate_expr_p (tem)) 692 return fold_build2_loc (loc, TREE_CODE (t), type, 693 TREE_OPERAND (t, 0), negate_expr (tem)); 694 tem = TREE_OPERAND (t, 0); 695 if (negate_expr_p (tem)) 696 return fold_build2_loc (loc, TREE_CODE (t), type, 697 negate_expr (tem), TREE_OPERAND (t, 1)); 698 } 699 break; 700 701 case TRUNC_DIV_EXPR: 702 case ROUND_DIV_EXPR: 703 case EXACT_DIV_EXPR: 704 /* In general we can't negate A / B, because if A is INT_MIN and 705 B is 1, we may turn this into INT_MIN / -1 which is undefined 706 and actually traps on some architectures. But if overflow is 707 undefined, we can negate, because - (INT_MIN / 1) is an 708 overflow. */ 709 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 710 { 711 const char * const warnmsg = G_("assuming signed overflow does not " 712 "occur when negating a division"); 713 tem = TREE_OPERAND (t, 1); 714 if (negate_expr_p (tem)) 715 { 716 if (INTEGRAL_TYPE_P (type) 717 && (TREE_CODE (tem) != INTEGER_CST 718 || integer_onep (tem))) 719 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); 720 return fold_build2_loc (loc, TREE_CODE (t), type, 721 TREE_OPERAND (t, 0), negate_expr (tem)); 722 } 723 /* If overflow is undefined then we have to be careful because 724 we ask whether it's ok to associate the negate with the 725 division which is not ok for example for 726 -((a - b) / c) where (-(a - b)) / c may invoke undefined 727 overflow because of negating INT_MIN. So do not use 728 negate_expr_p here but open-code the two important cases. */ 729 tem = TREE_OPERAND (t, 0); 730 if ((INTEGRAL_TYPE_P (type) 731 && (TREE_CODE (tem) == NEGATE_EXPR 732 || (TREE_CODE (tem) == INTEGER_CST 733 && may_negate_without_overflow_p (tem)))) 734 || !INTEGRAL_TYPE_P (type)) 735 return fold_build2_loc (loc, TREE_CODE (t), type, 736 negate_expr (tem), TREE_OPERAND (t, 1)); 737 } 738 break; 739 740 case NOP_EXPR: 741 /* Convert -((double)float) into (double)(-float). */ 742 if (TREE_CODE (type) == REAL_TYPE) 743 { 744 tem = strip_float_extensions (t); 745 if (tem != t && negate_expr_p (tem)) 746 return fold_convert_loc (loc, type, negate_expr (tem)); 747 } 748 break; 749 750 case CALL_EXPR: 751 /* Negate -f(x) as f(-x). 
*/ 752 if (negate_mathfn_p (builtin_mathfn_code (t)) 753 && negate_expr_p (CALL_EXPR_ARG (t, 0))) 754 { 755 tree fndecl, arg; 756 757 fndecl = get_callee_fndecl (t); 758 arg = negate_expr (CALL_EXPR_ARG (t, 0)); 759 return build_call_expr_loc (loc, fndecl, 1, arg); 760 } 761 break; 762 763 case RSHIFT_EXPR: 764 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */ 765 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) 766 { 767 tree op1 = TREE_OPERAND (t, 1); 768 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1)) 769 { 770 tree ntype = TYPE_UNSIGNED (type) 771 ? signed_type_for (type) 772 : unsigned_type_for (type); 773 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0)); 774 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1); 775 return fold_convert_loc (loc, type, temp); 776 } 777 } 778 break; 779 780 default: 781 break; 782 } 783 784 return NULL_TREE; 785} 786 787/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be 788 negated in a simpler way. Also allow for T to be NULL_TREE, in which case 789 return NULL_TREE. */ 790 791static tree 792negate_expr (tree t) 793{ 794 tree type, tem; 795 location_t loc; 796 797 if (t == NULL_TREE) 798 return NULL_TREE; 799 800 loc = EXPR_LOCATION (t); 801 type = TREE_TYPE (t); 802 STRIP_SIGN_NOPS (t); 803 804 tem = fold_negate_expr (loc, t); 805 if (!tem) 806 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t); 807 return fold_convert_loc (loc, type, tem); 808} 809 810/* Split a tree IN into a constant, literal and variable parts that could be 811 combined with CODE to make IN. "constant" means an expression with 812 TREE_CONSTANT but that isn't an actual constant. CODE must be a 813 commutative arithmetic operation. Store the constant part into *CONP, 814 the literal in *LITP and return the variable part. If a part isn't 815 present, set it to null. If the tree does not decompose in this way, 816 return the entire tree as the variable part and the other parts as null. 817 818 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that 819 case, we negate an operand that was subtracted. Except if it is a 820 literal for which we use *MINUS_LITP instead. 821 822 If NEGATE_P is true, we are negating all of IN, again except a literal 823 for which we use *MINUS_LITP instead. If a variable part is of pointer 824 type, it is negated after converting to TYPE. This prevents us from 825 generating illegal MINUS pointer expression. LOC is the location of 826 the converted variable part. 827 828 If IN is itself a literal or constant, return it as appropriate. 829 830 Note that we do not guarantee that any of the three values will be the 831 same type as IN, but they will have the same signedness and mode. */ 832 833static tree 834split_tree (location_t loc, tree in, tree type, enum tree_code code, 835 tree *conp, tree *litp, tree *minus_litp, int negate_p) 836{ 837 tree var = 0; 838 839 *conp = 0; 840 *litp = 0; 841 *minus_litp = 0; 842 843 /* Strip any conversions that don't change the machine mode or signedness. */ 844 STRIP_SIGN_NOPS (in); 845 846 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST 847 || TREE_CODE (in) == FIXED_CST) 848 *litp = in; 849 else if (TREE_CODE (in) == code 850 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math) 851 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in)) 852 /* We can associate addition and subtraction together (even 853 though the C standard doesn't say so) for integers because 854 the value is not affected. 
For reals, the value might be 855 affected, so we can't. */ 856 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR) 857 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR)))) 858 { 859 tree op0 = TREE_OPERAND (in, 0); 860 tree op1 = TREE_OPERAND (in, 1); 861 int neg1_p = TREE_CODE (in) == MINUS_EXPR; 862 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0; 863 864 /* First see if either of the operands is a literal, then a constant. */ 865 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST 866 || TREE_CODE (op0) == FIXED_CST) 867 *litp = op0, op0 = 0; 868 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST 869 || TREE_CODE (op1) == FIXED_CST) 870 *litp = op1, neg_litp_p = neg1_p, op1 = 0; 871 872 if (op0 != 0 && TREE_CONSTANT (op0)) 873 *conp = op0, op0 = 0; 874 else if (op1 != 0 && TREE_CONSTANT (op1)) 875 *conp = op1, neg_conp_p = neg1_p, op1 = 0; 876 877 /* If we haven't dealt with either operand, this is not a case we can 878 decompose. Otherwise, VAR is either of the ones remaining, if any. */ 879 if (op0 != 0 && op1 != 0) 880 var = in; 881 else if (op0 != 0) 882 var = op0; 883 else 884 var = op1, neg_var_p = neg1_p; 885 886 /* Now do any needed negations. */ 887 if (neg_litp_p) 888 *minus_litp = *litp, *litp = 0; 889 if (neg_conp_p) 890 *conp = negate_expr (*conp); 891 if (neg_var_p && var) 892 { 893 /* Convert to TYPE before negating. */ 894 var = fold_convert_loc (loc, type, var); 895 var = negate_expr (var); 896 } 897 } 898 else if (TREE_CODE (in) == BIT_NOT_EXPR 899 && code == PLUS_EXPR) 900 { 901 /* -X - 1 is folded to ~X, undo that here. */ 902 *minus_litp = build_one_cst (TREE_TYPE (in)); 903 var = negate_expr (TREE_OPERAND (in, 0)); 904 } 905 else if (TREE_CONSTANT (in)) 906 *conp = in; 907 else 908 var = in; 909 910 if (negate_p) 911 { 912 if (*litp) 913 *minus_litp = *litp, *litp = 0; 914 else if (*minus_litp) 915 *litp = *minus_litp, *minus_litp = 0; 916 *conp = negate_expr (*conp); 917 if (var) 918 { 919 /* Convert to TYPE before negating. */ 920 var = fold_convert_loc (loc, type, var); 921 var = negate_expr (var); 922 } 923 } 924 925 return var; 926} 927 928/* Re-associate trees split by the above function. T1 and T2 are 929 either expressions to associate or null. Return the new 930 expression, if any. LOC is the location of the new expression. If 931 we build an operation, do it in TYPE and with CODE. */ 932 933static tree 934associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type) 935{ 936 if (t1 == 0) 937 return t2; 938 else if (t2 == 0) 939 return t1; 940 941 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't 942 try to fold this since we will have infinite recursion. But do 943 deal with any NEGATE_EXPRs. 
*/ 944 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code 945 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR) 946 { 947 if (code == PLUS_EXPR) 948 { 949 if (TREE_CODE (t1) == NEGATE_EXPR) 950 return build2_loc (loc, MINUS_EXPR, type, 951 fold_convert_loc (loc, type, t2), 952 fold_convert_loc (loc, type, 953 TREE_OPERAND (t1, 0))); 954 else if (TREE_CODE (t2) == NEGATE_EXPR) 955 return build2_loc (loc, MINUS_EXPR, type, 956 fold_convert_loc (loc, type, t1), 957 fold_convert_loc (loc, type, 958 TREE_OPERAND (t2, 0))); 959 else if (integer_zerop (t2)) 960 return fold_convert_loc (loc, type, t1); 961 } 962 else if (code == MINUS_EXPR) 963 { 964 if (integer_zerop (t2)) 965 return fold_convert_loc (loc, type, t1); 966 } 967 968 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1), 969 fold_convert_loc (loc, type, t2)); 970 } 971 972 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1), 973 fold_convert_loc (loc, type, t2)); 974} 975 976/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable 977 for use in int_const_binop, size_binop and size_diffop. */ 978 979static bool 980int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2) 981{ 982 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1)) 983 return false; 984 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2)) 985 return false; 986 987 switch (code) 988 { 989 case LSHIFT_EXPR: 990 case RSHIFT_EXPR: 991 case LROTATE_EXPR: 992 case RROTATE_EXPR: 993 return true; 994 995 default: 996 break; 997 } 998 999 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2) 1000 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2) 1001 && TYPE_MODE (type1) == TYPE_MODE (type2); 1002} 1003 1004 1005/* Combine two integer constants ARG1 and ARG2 under operation CODE 1006 to produce a new constant. Return NULL_TREE if we don't know how 1007 to evaluate CODE at compile-time. */ 1008 1009static tree 1010int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2, 1011 int overflowable) 1012{ 1013 wide_int res; 1014 tree t; 1015 tree type = TREE_TYPE (arg1); 1016 signop sign = TYPE_SIGN (type); 1017 bool overflow = false; 1018 1019 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type), 1020 TYPE_SIGN (TREE_TYPE (parg2))); 1021 1022 switch (code) 1023 { 1024 case BIT_IOR_EXPR: 1025 res = wi::bit_or (arg1, arg2); 1026 break; 1027 1028 case BIT_XOR_EXPR: 1029 res = wi::bit_xor (arg1, arg2); 1030 break; 1031 1032 case BIT_AND_EXPR: 1033 res = wi::bit_and (arg1, arg2); 1034 break; 1035 1036 case RSHIFT_EXPR: 1037 case LSHIFT_EXPR: 1038 if (wi::neg_p (arg2)) 1039 { 1040 arg2 = -arg2; 1041 if (code == RSHIFT_EXPR) 1042 code = LSHIFT_EXPR; 1043 else 1044 code = RSHIFT_EXPR; 1045 } 1046 1047 if (code == RSHIFT_EXPR) 1048 /* It's unclear from the C standard whether shifts can overflow. 1049 The following code ignores overflow; perhaps a C standard 1050 interpretation ruling is needed. 
*/ 1051 res = wi::rshift (arg1, arg2, sign); 1052 else 1053 res = wi::lshift (arg1, arg2); 1054 break; 1055 1056 case RROTATE_EXPR: 1057 case LROTATE_EXPR: 1058 if (wi::neg_p (arg2)) 1059 { 1060 arg2 = -arg2; 1061 if (code == RROTATE_EXPR) 1062 code = LROTATE_EXPR; 1063 else 1064 code = RROTATE_EXPR; 1065 } 1066 1067 if (code == RROTATE_EXPR) 1068 res = wi::rrotate (arg1, arg2); 1069 else 1070 res = wi::lrotate (arg1, arg2); 1071 break; 1072 1073 case PLUS_EXPR: 1074 res = wi::add (arg1, arg2, sign, &overflow); 1075 break; 1076 1077 case MINUS_EXPR: 1078 res = wi::sub (arg1, arg2, sign, &overflow); 1079 break; 1080 1081 case MULT_EXPR: 1082 res = wi::mul (arg1, arg2, sign, &overflow); 1083 break; 1084 1085 case MULT_HIGHPART_EXPR: 1086 res = wi::mul_high (arg1, arg2, sign); 1087 break; 1088 1089 case TRUNC_DIV_EXPR: 1090 case EXACT_DIV_EXPR: 1091 if (arg2 == 0) 1092 return NULL_TREE; 1093 res = wi::div_trunc (arg1, arg2, sign, &overflow); 1094 break; 1095 1096 case FLOOR_DIV_EXPR: 1097 if (arg2 == 0) 1098 return NULL_TREE; 1099 res = wi::div_floor (arg1, arg2, sign, &overflow); 1100 break; 1101 1102 case CEIL_DIV_EXPR: 1103 if (arg2 == 0) 1104 return NULL_TREE; 1105 res = wi::div_ceil (arg1, arg2, sign, &overflow); 1106 break; 1107 1108 case ROUND_DIV_EXPR: 1109 if (arg2 == 0) 1110 return NULL_TREE; 1111 res = wi::div_round (arg1, arg2, sign, &overflow); 1112 break; 1113 1114 case TRUNC_MOD_EXPR: 1115 if (arg2 == 0) 1116 return NULL_TREE; 1117 res = wi::mod_trunc (arg1, arg2, sign, &overflow); 1118 break; 1119 1120 case FLOOR_MOD_EXPR: 1121 if (arg2 == 0) 1122 return NULL_TREE; 1123 res = wi::mod_floor (arg1, arg2, sign, &overflow); 1124 break; 1125 1126 case CEIL_MOD_EXPR: 1127 if (arg2 == 0) 1128 return NULL_TREE; 1129 res = wi::mod_ceil (arg1, arg2, sign, &overflow); 1130 break; 1131 1132 case ROUND_MOD_EXPR: 1133 if (arg2 == 0) 1134 return NULL_TREE; 1135 res = wi::mod_round (arg1, arg2, sign, &overflow); 1136 break; 1137 1138 case MIN_EXPR: 1139 res = wi::min (arg1, arg2, sign); 1140 break; 1141 1142 case MAX_EXPR: 1143 res = wi::max (arg1, arg2, sign); 1144 break; 1145 1146 default: 1147 return NULL_TREE; 1148 } 1149 1150 t = force_fit_type (type, res, overflowable, 1151 (((sign == SIGNED || overflowable == -1) 1152 && overflow) 1153 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2))); 1154 1155 return t; 1156} 1157 1158tree 1159int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2) 1160{ 1161 return int_const_binop_1 (code, arg1, arg2, 1); 1162} 1163 1164/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new 1165 constant. We assume ARG1 and ARG2 have the same data type, or at least 1166 are the same kind of constant and the same machine mode. Return zero if 1167 combining the constants is not allowed in the current operating mode. */ 1168 1169static tree 1170const_binop (enum tree_code code, tree arg1, tree arg2) 1171{ 1172 /* Sanity check for the recursive cases. 
*/ 1173 if (!arg1 || !arg2) 1174 return NULL_TREE; 1175 1176 STRIP_NOPS (arg1); 1177 STRIP_NOPS (arg2); 1178 1179 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST) 1180 { 1181 if (code == POINTER_PLUS_EXPR) 1182 return int_const_binop (PLUS_EXPR, 1183 arg1, fold_convert (TREE_TYPE (arg1), arg2)); 1184 1185 return int_const_binop (code, arg1, arg2); 1186 } 1187 1188 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST) 1189 { 1190 machine_mode mode; 1191 REAL_VALUE_TYPE d1; 1192 REAL_VALUE_TYPE d2; 1193 REAL_VALUE_TYPE value; 1194 REAL_VALUE_TYPE result; 1195 bool inexact; 1196 tree t, type; 1197 1198 /* The following codes are handled by real_arithmetic. */ 1199 switch (code) 1200 { 1201 case PLUS_EXPR: 1202 case MINUS_EXPR: 1203 case MULT_EXPR: 1204 case RDIV_EXPR: 1205 case MIN_EXPR: 1206 case MAX_EXPR: 1207 break; 1208 1209 default: 1210 return NULL_TREE; 1211 } 1212 1213 d1 = TREE_REAL_CST (arg1); 1214 d2 = TREE_REAL_CST (arg2); 1215 1216 type = TREE_TYPE (arg1); 1217 mode = TYPE_MODE (type); 1218 1219 /* Don't perform operation if we honor signaling NaNs and 1220 either operand is a NaN. */ 1221 if (HONOR_SNANS (mode) 1222 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2))) 1223 return NULL_TREE; 1224 1225 /* Don't perform operation if it would raise a division 1226 by zero exception. */ 1227 if (code == RDIV_EXPR 1228 && REAL_VALUES_EQUAL (d2, dconst0) 1229 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode))) 1230 return NULL_TREE; 1231 1232 /* If either operand is a NaN, just return it. Otherwise, set up 1233 for floating-point trap; we return an overflow. */ 1234 if (REAL_VALUE_ISNAN (d1)) 1235 return arg1; 1236 else if (REAL_VALUE_ISNAN (d2)) 1237 return arg2; 1238 1239 inexact = real_arithmetic (&value, code, &d1, &d2); 1240 real_convert (&result, mode, &value); 1241 1242 /* Don't constant fold this floating point operation if 1243 the result has overflowed and flag_trapping_math. */ 1244 if (flag_trapping_math 1245 && MODE_HAS_INFINITIES (mode) 1246 && REAL_VALUE_ISINF (result) 1247 && !REAL_VALUE_ISINF (d1) 1248 && !REAL_VALUE_ISINF (d2)) 1249 return NULL_TREE; 1250 1251 /* Don't constant fold this floating point operation if the 1252 result may dependent upon the run-time rounding mode and 1253 flag_rounding_math is set, or if GCC's software emulation 1254 is unable to accurately represent the result. */ 1255 if ((flag_rounding_math 1256 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations)) 1257 && (inexact || !real_identical (&result, &value))) 1258 return NULL_TREE; 1259 1260 t = build_real (type, result); 1261 1262 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2); 1263 return t; 1264 } 1265 1266 if (TREE_CODE (arg1) == FIXED_CST) 1267 { 1268 FIXED_VALUE_TYPE f1; 1269 FIXED_VALUE_TYPE f2; 1270 FIXED_VALUE_TYPE result; 1271 tree t, type; 1272 int sat_p; 1273 bool overflow_p; 1274 1275 /* The following codes are handled by fixed_arithmetic. 
*/ 1276 switch (code) 1277 { 1278 case PLUS_EXPR: 1279 case MINUS_EXPR: 1280 case MULT_EXPR: 1281 case TRUNC_DIV_EXPR: 1282 if (TREE_CODE (arg2) != FIXED_CST) 1283 return NULL_TREE; 1284 f2 = TREE_FIXED_CST (arg2); 1285 break; 1286 1287 case LSHIFT_EXPR: 1288 case RSHIFT_EXPR: 1289 { 1290 if (TREE_CODE (arg2) != INTEGER_CST) 1291 return NULL_TREE; 1292 wide_int w2 = arg2; 1293 f2.data.high = w2.elt (1); 1294 f2.data.low = w2.elt (0); 1295 f2.mode = SImode; 1296 } 1297 break; 1298 1299 default: 1300 return NULL_TREE; 1301 } 1302 1303 f1 = TREE_FIXED_CST (arg1); 1304 type = TREE_TYPE (arg1); 1305 sat_p = TYPE_SATURATING (type); 1306 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p); 1307 t = build_fixed (type, result); 1308 /* Propagate overflow flags. */ 1309 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)) 1310 TREE_OVERFLOW (t) = 1; 1311 return t; 1312 } 1313 1314 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST) 1315 { 1316 tree type = TREE_TYPE (arg1); 1317 tree r1 = TREE_REALPART (arg1); 1318 tree i1 = TREE_IMAGPART (arg1); 1319 tree r2 = TREE_REALPART (arg2); 1320 tree i2 = TREE_IMAGPART (arg2); 1321 tree real, imag; 1322 1323 switch (code) 1324 { 1325 case PLUS_EXPR: 1326 case MINUS_EXPR: 1327 real = const_binop (code, r1, r2); 1328 imag = const_binop (code, i1, i2); 1329 break; 1330 1331 case MULT_EXPR: 1332 if (COMPLEX_FLOAT_TYPE_P (type)) 1333 return do_mpc_arg2 (arg1, arg2, type, 1334 /* do_nonfinite= */ folding_initializer, 1335 mpc_mul); 1336 1337 real = const_binop (MINUS_EXPR, 1338 const_binop (MULT_EXPR, r1, r2), 1339 const_binop (MULT_EXPR, i1, i2)); 1340 imag = const_binop (PLUS_EXPR, 1341 const_binop (MULT_EXPR, r1, i2), 1342 const_binop (MULT_EXPR, i1, r2)); 1343 break; 1344 1345 case RDIV_EXPR: 1346 if (COMPLEX_FLOAT_TYPE_P (type)) 1347 return do_mpc_arg2 (arg1, arg2, type, 1348 /* do_nonfinite= */ folding_initializer, 1349 mpc_div); 1350 /* Fallthru ... */ 1351 case TRUNC_DIV_EXPR: 1352 case CEIL_DIV_EXPR: 1353 case FLOOR_DIV_EXPR: 1354 case ROUND_DIV_EXPR: 1355 if (flag_complex_method == 0) 1356 { 1357 /* Keep this algorithm in sync with 1358 tree-complex.c:expand_complex_div_straight(). 1359 1360 Expand complex division to scalars, straightforward algorithm. 1361 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t) 1362 t = br*br + bi*bi 1363 */ 1364 tree magsquared 1365 = const_binop (PLUS_EXPR, 1366 const_binop (MULT_EXPR, r2, r2), 1367 const_binop (MULT_EXPR, i2, i2)); 1368 tree t1 1369 = const_binop (PLUS_EXPR, 1370 const_binop (MULT_EXPR, r1, r2), 1371 const_binop (MULT_EXPR, i1, i2)); 1372 tree t2 1373 = const_binop (MINUS_EXPR, 1374 const_binop (MULT_EXPR, i1, r2), 1375 const_binop (MULT_EXPR, r1, i2)); 1376 1377 real = const_binop (code, t1, magsquared); 1378 imag = const_binop (code, t2, magsquared); 1379 } 1380 else 1381 { 1382 /* Keep this algorithm in sync with 1383 tree-complex.c:expand_complex_div_wide(). 1384 1385 Expand complex division to scalars, modified algorithm to minimize 1386 overflow with wide input ranges. 
*/ 1387 tree compare = fold_build2 (LT_EXPR, boolean_type_node, 1388 fold_abs_const (r2, TREE_TYPE (type)), 1389 fold_abs_const (i2, TREE_TYPE (type))); 1390 1391 if (integer_nonzerop (compare)) 1392 { 1393 /* In the TRUE branch, we compute 1394 ratio = br/bi; 1395 div = (br * ratio) + bi; 1396 tr = (ar * ratio) + ai; 1397 ti = (ai * ratio) - ar; 1398 tr = tr / div; 1399 ti = ti / div; */ 1400 tree ratio = const_binop (code, r2, i2); 1401 tree div = const_binop (PLUS_EXPR, i2, 1402 const_binop (MULT_EXPR, r2, ratio)); 1403 real = const_binop (MULT_EXPR, r1, ratio); 1404 real = const_binop (PLUS_EXPR, real, i1); 1405 real = const_binop (code, real, div); 1406 1407 imag = const_binop (MULT_EXPR, i1, ratio); 1408 imag = const_binop (MINUS_EXPR, imag, r1); 1409 imag = const_binop (code, imag, div); 1410 } 1411 else 1412 { 1413 /* In the FALSE branch, we compute 1414 ratio = d/c; 1415 divisor = (d * ratio) + c; 1416 tr = (b * ratio) + a; 1417 ti = b - (a * ratio); 1418 tr = tr / div; 1419 ti = ti / div; */ 1420 tree ratio = const_binop (code, i2, r2); 1421 tree div = const_binop (PLUS_EXPR, r2, 1422 const_binop (MULT_EXPR, i2, ratio)); 1423 1424 real = const_binop (MULT_EXPR, i1, ratio); 1425 real = const_binop (PLUS_EXPR, real, r1); 1426 real = const_binop (code, real, div); 1427 1428 imag = const_binop (MULT_EXPR, r1, ratio); 1429 imag = const_binop (MINUS_EXPR, i1, imag); 1430 imag = const_binop (code, imag, div); 1431 } 1432 } 1433 break; 1434 1435 default: 1436 return NULL_TREE; 1437 } 1438 1439 if (real && imag) 1440 return build_complex (type, real, imag); 1441 } 1442 1443 if (TREE_CODE (arg1) == VECTOR_CST 1444 && TREE_CODE (arg2) == VECTOR_CST) 1445 { 1446 tree type = TREE_TYPE (arg1); 1447 int count = TYPE_VECTOR_SUBPARTS (type), i; 1448 tree *elts = XALLOCAVEC (tree, count); 1449 1450 for (i = 0; i < count; i++) 1451 { 1452 tree elem1 = VECTOR_CST_ELT (arg1, i); 1453 tree elem2 = VECTOR_CST_ELT (arg2, i); 1454 1455 elts[i] = const_binop (code, elem1, elem2); 1456 1457 /* It is possible that const_binop cannot handle the given 1458 code and return NULL_TREE */ 1459 if (elts[i] == NULL_TREE) 1460 return NULL_TREE; 1461 } 1462 1463 return build_vector (type, elts); 1464 } 1465 1466 /* Shifts allow a scalar offset for a vector. */ 1467 if (TREE_CODE (arg1) == VECTOR_CST 1468 && TREE_CODE (arg2) == INTEGER_CST) 1469 { 1470 tree type = TREE_TYPE (arg1); 1471 int count = TYPE_VECTOR_SUBPARTS (type), i; 1472 tree *elts = XALLOCAVEC (tree, count); 1473 1474 for (i = 0; i < count; i++) 1475 { 1476 tree elem1 = VECTOR_CST_ELT (arg1, i); 1477 1478 elts[i] = const_binop (code, elem1, arg2); 1479 1480 /* It is possible that const_binop cannot handle the given 1481 code and return NULL_TREE. */ 1482 if (elts[i] == NULL_TREE) 1483 return NULL_TREE; 1484 } 1485 1486 return build_vector (type, elts); 1487 } 1488 return NULL_TREE; 1489} 1490 1491/* Overload that adds a TYPE parameter to be able to dispatch 1492 to fold_relational_const. */ 1493 1494tree 1495const_binop (enum tree_code code, tree type, tree arg1, tree arg2) 1496{ 1497 if (TREE_CODE_CLASS (code) == tcc_comparison) 1498 return fold_relational_const (code, type, arg1, arg2); 1499 1500 /* ??? Until we make the const_binop worker take the type of the 1501 result as argument put those cases that need it here. 
*/ 1502 switch (code) 1503 { 1504 case COMPLEX_EXPR: 1505 if ((TREE_CODE (arg1) == REAL_CST 1506 && TREE_CODE (arg2) == REAL_CST) 1507 || (TREE_CODE (arg1) == INTEGER_CST 1508 && TREE_CODE (arg2) == INTEGER_CST)) 1509 return build_complex (type, arg1, arg2); 1510 return NULL_TREE; 1511 1512 case VEC_PACK_TRUNC_EXPR: 1513 case VEC_PACK_FIX_TRUNC_EXPR: 1514 { 1515 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 1516 tree *elts; 1517 1518 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2 1519 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2); 1520 if (TREE_CODE (arg1) != VECTOR_CST 1521 || TREE_CODE (arg2) != VECTOR_CST) 1522 return NULL_TREE; 1523 1524 elts = XALLOCAVEC (tree, nelts); 1525 if (!vec_cst_ctor_to_array (arg1, elts) 1526 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2)) 1527 return NULL_TREE; 1528 1529 for (i = 0; i < nelts; i++) 1530 { 1531 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR 1532 ? NOP_EXPR : FIX_TRUNC_EXPR, 1533 TREE_TYPE (type), elts[i]); 1534 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i])) 1535 return NULL_TREE; 1536 } 1537 1538 return build_vector (type, elts); 1539 } 1540 1541 case VEC_WIDEN_MULT_LO_EXPR: 1542 case VEC_WIDEN_MULT_HI_EXPR: 1543 case VEC_WIDEN_MULT_EVEN_EXPR: 1544 case VEC_WIDEN_MULT_ODD_EXPR: 1545 { 1546 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type); 1547 unsigned int out, ofs, scale; 1548 tree *elts; 1549 1550 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2 1551 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2); 1552 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST) 1553 return NULL_TREE; 1554 1555 elts = XALLOCAVEC (tree, nelts * 4); 1556 if (!vec_cst_ctor_to_array (arg1, elts) 1557 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2)) 1558 return NULL_TREE; 1559 1560 if (code == VEC_WIDEN_MULT_LO_EXPR) 1561 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0; 1562 else if (code == VEC_WIDEN_MULT_HI_EXPR) 1563 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts; 1564 else if (code == VEC_WIDEN_MULT_EVEN_EXPR) 1565 scale = 1, ofs = 0; 1566 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */ 1567 scale = 1, ofs = 1; 1568 1569 for (out = 0; out < nelts; out++) 1570 { 1571 unsigned int in1 = (out << scale) + ofs; 1572 unsigned int in2 = in1 + nelts * 2; 1573 tree t1, t2; 1574 1575 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]); 1576 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]); 1577 1578 if (t1 == NULL_TREE || t2 == NULL_TREE) 1579 return NULL_TREE; 1580 elts[out] = const_binop (MULT_EXPR, t1, t2); 1581 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out])) 1582 return NULL_TREE; 1583 } 1584 1585 return build_vector (type, elts); 1586 } 1587 1588 default:; 1589 } 1590 1591 if (TREE_CODE_CLASS (code) != tcc_binary) 1592 return NULL_TREE; 1593 1594 /* Make sure type and arg0 have the same saturating flag. */ 1595 gcc_checking_assert (TYPE_SATURATING (type) 1596 == TYPE_SATURATING (TREE_TYPE (arg1))); 1597 1598 return const_binop (code, arg1, arg2); 1599} 1600 1601/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant. 1602 Return zero if computing the constants is not possible. 
*/ 1603 1604tree 1605const_unop (enum tree_code code, tree type, tree arg0) 1606{ 1607 switch (code) 1608 { 1609 CASE_CONVERT: 1610 case FLOAT_EXPR: 1611 case FIX_TRUNC_EXPR: 1612 case FIXED_CONVERT_EXPR: 1613 return fold_convert_const (code, type, arg0); 1614 1615 case ADDR_SPACE_CONVERT_EXPR: 1616 if (integer_zerop (arg0)) 1617 return fold_convert_const (code, type, arg0); 1618 break; 1619 1620 case VIEW_CONVERT_EXPR: 1621 return fold_view_convert_expr (type, arg0); 1622 1623 case NEGATE_EXPR: 1624 { 1625 /* Can't call fold_negate_const directly here as that doesn't 1626 handle all cases and we might not be able to negate some 1627 constants. */ 1628 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0); 1629 if (tem && CONSTANT_CLASS_P (tem)) 1630 return tem; 1631 break; 1632 } 1633 1634 case ABS_EXPR: 1635 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST) 1636 return fold_abs_const (arg0, type); 1637 break; 1638 1639 case CONJ_EXPR: 1640 if (TREE_CODE (arg0) == COMPLEX_CST) 1641 { 1642 tree ipart = fold_negate_const (TREE_IMAGPART (arg0), 1643 TREE_TYPE (type)); 1644 return build_complex (type, TREE_REALPART (arg0), ipart); 1645 } 1646 break; 1647 1648 case BIT_NOT_EXPR: 1649 if (TREE_CODE (arg0) == INTEGER_CST) 1650 return fold_not_const (arg0, type); 1651 /* Perform BIT_NOT_EXPR on each element individually. */ 1652 else if (TREE_CODE (arg0) == VECTOR_CST) 1653 { 1654 tree *elements; 1655 tree elem; 1656 unsigned count = VECTOR_CST_NELTS (arg0), i; 1657 1658 elements = XALLOCAVEC (tree, count); 1659 for (i = 0; i < count; i++) 1660 { 1661 elem = VECTOR_CST_ELT (arg0, i); 1662 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem); 1663 if (elem == NULL_TREE) 1664 break; 1665 elements[i] = elem; 1666 } 1667 if (i == count) 1668 return build_vector (type, elements); 1669 } 1670 break; 1671 1672 case TRUTH_NOT_EXPR: 1673 if (TREE_CODE (arg0) == INTEGER_CST) 1674 return constant_boolean_node (integer_zerop (arg0), type); 1675 break; 1676 1677 case REALPART_EXPR: 1678 if (TREE_CODE (arg0) == COMPLEX_CST) 1679 return fold_convert (type, TREE_REALPART (arg0)); 1680 break; 1681 1682 case IMAGPART_EXPR: 1683 if (TREE_CODE (arg0) == COMPLEX_CST) 1684 return fold_convert (type, TREE_IMAGPART (arg0)); 1685 break; 1686 1687 case VEC_UNPACK_LO_EXPR: 1688 case VEC_UNPACK_HI_EXPR: 1689 case VEC_UNPACK_FLOAT_LO_EXPR: 1690 case VEC_UNPACK_FLOAT_HI_EXPR: 1691 { 1692 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 1693 tree *elts; 1694 enum tree_code subcode; 1695 1696 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2); 1697 if (TREE_CODE (arg0) != VECTOR_CST) 1698 return NULL_TREE; 1699 1700 elts = XALLOCAVEC (tree, nelts * 2); 1701 if (!vec_cst_ctor_to_array (arg0, elts)) 1702 return NULL_TREE; 1703 1704 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR 1705 || code == VEC_UNPACK_FLOAT_LO_EXPR)) 1706 elts += nelts; 1707 1708 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR) 1709 subcode = NOP_EXPR; 1710 else 1711 subcode = FLOAT_EXPR; 1712 1713 for (i = 0; i < nelts; i++) 1714 { 1715 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]); 1716 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i])) 1717 return NULL_TREE; 1718 } 1719 1720 return build_vector (type, elts); 1721 } 1722 1723 case REDUC_MIN_EXPR: 1724 case REDUC_MAX_EXPR: 1725 case REDUC_PLUS_EXPR: 1726 { 1727 unsigned int nelts, i; 1728 tree *elts; 1729 enum tree_code subcode; 1730 1731 if (TREE_CODE (arg0) != VECTOR_CST) 1732 return NULL_TREE; 1733 nelts = 
TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)); 1734 1735 elts = XALLOCAVEC (tree, nelts); 1736 if (!vec_cst_ctor_to_array (arg0, elts)) 1737 return NULL_TREE; 1738 1739 switch (code) 1740 { 1741 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break; 1742 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break; 1743 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break; 1744 default: gcc_unreachable (); 1745 } 1746 1747 for (i = 1; i < nelts; i++) 1748 { 1749 elts[0] = const_binop (subcode, elts[0], elts[i]); 1750 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0])) 1751 return NULL_TREE; 1752 } 1753 1754 return elts[0]; 1755 } 1756 1757 default: 1758 break; 1759 } 1760 1761 return NULL_TREE; 1762} 1763 1764/* Create a sizetype INT_CST node with NUMBER sign extended. KIND 1765 indicates which particular sizetype to create. */ 1766 1767tree 1768size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) 1769{ 1770 return build_int_cst (sizetype_tab[(int) kind], number); 1771} 1772 1773/* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE 1774 is a tree code. The type of the result is taken from the operands. 1775 Both must be equivalent integer types, ala int_binop_types_match_p. 1776 If the operands are constant, so is the result. */ 1777 1778tree 1779size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1) 1780{ 1781 tree type = TREE_TYPE (arg0); 1782 1783 if (arg0 == error_mark_node || arg1 == error_mark_node) 1784 return error_mark_node; 1785 1786 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0), 1787 TREE_TYPE (arg1))); 1788 1789 /* Handle the special case of two integer constants faster. */ 1790 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 1791 { 1792 /* And some specific cases even faster than that. */ 1793 if (code == PLUS_EXPR) 1794 { 1795 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0)) 1796 return arg1; 1797 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) 1798 return arg0; 1799 } 1800 else if (code == MINUS_EXPR) 1801 { 1802 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) 1803 return arg0; 1804 } 1805 else if (code == MULT_EXPR) 1806 { 1807 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0)) 1808 return arg1; 1809 } 1810 1811 /* Handle general case of two integer constants. For sizetype 1812 constant calculations we always want to know about overflow, 1813 even in the unsigned case. */ 1814 return int_const_binop_1 (code, arg0, arg1, -1); 1815 } 1816 1817 return fold_build2_loc (loc, code, type, arg0, arg1); 1818} 1819 1820/* Given two values, either both of sizetype or both of bitsizetype, 1821 compute the difference between the two values. Return the value 1822 in signed type corresponding to the type of the operands. */ 1823 1824tree 1825size_diffop_loc (location_t loc, tree arg0, tree arg1) 1826{ 1827 tree type = TREE_TYPE (arg0); 1828 tree ctype; 1829 1830 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0), 1831 TREE_TYPE (arg1))); 1832 1833 /* If the type is already signed, just do the simple thing. */ 1834 if (!TYPE_UNSIGNED (type)) 1835 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1); 1836 1837 if (type == sizetype) 1838 ctype = ssizetype; 1839 else if (type == bitsizetype) 1840 ctype = sbitsizetype; 1841 else 1842 ctype = signed_type_for (type); 1843 1844 /* If either operand is not a constant, do the conversions to the signed 1845 type and subtract. The hardware will do the right thing with any 1846 overflow in the subtraction. 
*/ 1847 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST) 1848 return size_binop_loc (loc, MINUS_EXPR, 1849 fold_convert_loc (loc, ctype, arg0), 1850 fold_convert_loc (loc, ctype, arg1)); 1851 1852 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE. 1853 Otherwise, subtract the other way, convert to CTYPE (we know that can't 1854 overflow) and negate (which can't either). Special-case a result 1855 of zero while we're here. */ 1856 if (tree_int_cst_equal (arg0, arg1)) 1857 return build_int_cst (ctype, 0); 1858 else if (tree_int_cst_lt (arg1, arg0)) 1859 return fold_convert_loc (loc, ctype, 1860 size_binop_loc (loc, MINUS_EXPR, arg0, arg1)); 1861 else 1862 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0), 1863 fold_convert_loc (loc, ctype, 1864 size_binop_loc (loc, 1865 MINUS_EXPR, 1866 arg1, arg0))); 1867} 1868 1869/* A subroutine of fold_convert_const handling conversions of an 1870 INTEGER_CST to another integer type. */ 1871 1872static tree 1873fold_convert_const_int_from_int (tree type, const_tree arg1) 1874{ 1875 /* Given an integer constant, make new constant with new type, 1876 appropriately sign-extended or truncated. Use widest_int 1877 so that any extension is done according ARG1's type. */ 1878 return force_fit_type (type, wi::to_widest (arg1), 1879 !POINTER_TYPE_P (TREE_TYPE (arg1)), 1880 TREE_OVERFLOW (arg1)); 1881} 1882 1883/* A subroutine of fold_convert_const handling conversions a REAL_CST 1884 to an integer type. */ 1885 1886static tree 1887fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1) 1888{ 1889 bool overflow = false; 1890 tree t; 1891 1892 /* The following code implements the floating point to integer 1893 conversion rules required by the Java Language Specification, 1894 that IEEE NaNs are mapped to zero and values that overflow 1895 the target precision saturate, i.e. values greater than 1896 INT_MAX are mapped to INT_MAX, and values less than INT_MIN 1897 are mapped to INT_MIN. These semantics are allowed by the 1898 C and C++ standards that simply state that the behavior of 1899 FP-to-integer conversion is unspecified upon overflow. */ 1900 1901 wide_int val; 1902 REAL_VALUE_TYPE r; 1903 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); 1904 1905 switch (code) 1906 { 1907 case FIX_TRUNC_EXPR: 1908 real_trunc (&r, VOIDmode, &x); 1909 break; 1910 1911 default: 1912 gcc_unreachable (); 1913 } 1914 1915 /* If R is NaN, return zero and show we have an overflow. */ 1916 if (REAL_VALUE_ISNAN (r)) 1917 { 1918 overflow = true; 1919 val = wi::zero (TYPE_PRECISION (type)); 1920 } 1921 1922 /* See if R is less than the lower bound or greater than the 1923 upper bound. */ 1924 1925 if (! overflow) 1926 { 1927 tree lt = TYPE_MIN_VALUE (type); 1928 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt); 1929 if (REAL_VALUES_LESS (r, l)) 1930 { 1931 overflow = true; 1932 val = lt; 1933 } 1934 } 1935 1936 if (! overflow) 1937 { 1938 tree ut = TYPE_MAX_VALUE (type); 1939 if (ut) 1940 { 1941 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut); 1942 if (REAL_VALUES_LESS (u, r)) 1943 { 1944 overflow = true; 1945 val = ut; 1946 } 1947 } 1948 } 1949 1950 if (! overflow) 1951 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type)); 1952 1953 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1)); 1954 return t; 1955} 1956 1957/* A subroutine of fold_convert_const handling conversions of a 1958 FIXED_CST to an integer type. 
*/ 1959 1960static tree 1961fold_convert_const_int_from_fixed (tree type, const_tree arg1) 1962{ 1963 tree t; 1964 double_int temp, temp_trunc; 1965 unsigned int mode; 1966 1967 /* Right shift FIXED_CST to temp by fbit. */ 1968 temp = TREE_FIXED_CST (arg1).data; 1969 mode = TREE_FIXED_CST (arg1).mode; 1970 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT) 1971 { 1972 temp = temp.rshift (GET_MODE_FBIT (mode), 1973 HOST_BITS_PER_DOUBLE_INT, 1974 SIGNED_FIXED_POINT_MODE_P (mode)); 1975 1976 /* Left shift temp to temp_trunc by fbit. */ 1977 temp_trunc = temp.lshift (GET_MODE_FBIT (mode), 1978 HOST_BITS_PER_DOUBLE_INT, 1979 SIGNED_FIXED_POINT_MODE_P (mode)); 1980 } 1981 else 1982 { 1983 temp = double_int_zero; 1984 temp_trunc = double_int_zero; 1985 } 1986 1987 /* If FIXED_CST is negative, we need to round the value toward 0. 1988 By checking if the fractional bits are not zero to add 1 to temp. */ 1989 if (SIGNED_FIXED_POINT_MODE_P (mode) 1990 && temp_trunc.is_negative () 1991 && TREE_FIXED_CST (arg1).data != temp_trunc) 1992 temp += double_int_one; 1993 1994 /* Given a fixed-point constant, make new constant with new type, 1995 appropriately sign-extended or truncated. */ 1996 t = force_fit_type (type, temp, -1, 1997 (temp.is_negative () 1998 && (TYPE_UNSIGNED (type) 1999 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 2000 | TREE_OVERFLOW (arg1)); 2001 2002 return t; 2003} 2004 2005/* A subroutine of fold_convert_const handling conversions a REAL_CST 2006 to another floating point type. */ 2007 2008static tree 2009fold_convert_const_real_from_real (tree type, const_tree arg1) 2010{ 2011 REAL_VALUE_TYPE value; 2012 tree t; 2013 2014 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1)); 2015 t = build_real (type, value); 2016 2017 /* If converting an infinity or NAN to a representation that doesn't 2018 have one, set the overflow bit so that we can produce some kind of 2019 error message at the appropriate point if necessary. It's not the 2020 most user-friendly message, but it's better than nothing. */ 2021 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1)) 2022 && !MODE_HAS_INFINITIES (TYPE_MODE (type))) 2023 TREE_OVERFLOW (t) = 1; 2024 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)) 2025 && !MODE_HAS_NANS (TYPE_MODE (type))) 2026 TREE_OVERFLOW (t) = 1; 2027 /* Regular overflow, conversion produced an infinity in a mode that 2028 can't represent them. */ 2029 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) 2030 && REAL_VALUE_ISINF (value) 2031 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1))) 2032 TREE_OVERFLOW (t) = 1; 2033 else 2034 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); 2035 return t; 2036} 2037 2038/* A subroutine of fold_convert_const handling conversions a FIXED_CST 2039 to a floating point type. */ 2040 2041static tree 2042fold_convert_const_real_from_fixed (tree type, const_tree arg1) 2043{ 2044 REAL_VALUE_TYPE value; 2045 tree t; 2046 2047 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1)); 2048 t = build_real (type, value); 2049 2050 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); 2051 return t; 2052} 2053 2054/* A subroutine of fold_convert_const handling conversions a FIXED_CST 2055 to another fixed-point type. */ 2056 2057static tree 2058fold_convert_const_fixed_from_fixed (tree type, const_tree arg1) 2059{ 2060 FIXED_VALUE_TYPE value; 2061 tree t; 2062 bool overflow_p; 2063 2064 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1), 2065 TYPE_SATURATING (type)); 2066 t = build_fixed (type, value); 2067 2068 /* Propagate overflow flags. 
*/ 2069 if (overflow_p | TREE_OVERFLOW (arg1)) 2070 TREE_OVERFLOW (t) = 1; 2071 return t; 2072} 2073 2074/* A subroutine of fold_convert_const handling conversions an INTEGER_CST 2075 to a fixed-point type. */ 2076 2077static tree 2078fold_convert_const_fixed_from_int (tree type, const_tree arg1) 2079{ 2080 FIXED_VALUE_TYPE value; 2081 tree t; 2082 bool overflow_p; 2083 double_int di; 2084 2085 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2); 2086 2087 di.low = TREE_INT_CST_ELT (arg1, 0); 2088 if (TREE_INT_CST_NUNITS (arg1) == 1) 2089 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0; 2090 else 2091 di.high = TREE_INT_CST_ELT (arg1, 1); 2092 2093 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di, 2094 TYPE_UNSIGNED (TREE_TYPE (arg1)), 2095 TYPE_SATURATING (type)); 2096 t = build_fixed (type, value); 2097 2098 /* Propagate overflow flags. */ 2099 if (overflow_p | TREE_OVERFLOW (arg1)) 2100 TREE_OVERFLOW (t) = 1; 2101 return t; 2102} 2103 2104/* A subroutine of fold_convert_const handling conversions a REAL_CST 2105 to a fixed-point type. */ 2106 2107static tree 2108fold_convert_const_fixed_from_real (tree type, const_tree arg1) 2109{ 2110 FIXED_VALUE_TYPE value; 2111 tree t; 2112 bool overflow_p; 2113 2114 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type), 2115 &TREE_REAL_CST (arg1), 2116 TYPE_SATURATING (type)); 2117 t = build_fixed (type, value); 2118 2119 /* Propagate overflow flags. */ 2120 if (overflow_p | TREE_OVERFLOW (arg1)) 2121 TREE_OVERFLOW (t) = 1; 2122 return t; 2123} 2124 2125/* Attempt to fold type conversion operation CODE of expression ARG1 to 2126 type TYPE. If no simplification can be done return NULL_TREE. */ 2127 2128static tree 2129fold_convert_const (enum tree_code code, tree type, tree arg1) 2130{ 2131 if (TREE_TYPE (arg1) == type) 2132 return arg1; 2133 2134 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type) 2135 || TREE_CODE (type) == OFFSET_TYPE) 2136 { 2137 if (TREE_CODE (arg1) == INTEGER_CST) 2138 return fold_convert_const_int_from_int (type, arg1); 2139 else if (TREE_CODE (arg1) == REAL_CST) 2140 return fold_convert_const_int_from_real (code, type, arg1); 2141 else if (TREE_CODE (arg1) == FIXED_CST) 2142 return fold_convert_const_int_from_fixed (type, arg1); 2143 } 2144 else if (TREE_CODE (type) == REAL_TYPE) 2145 { 2146 if (TREE_CODE (arg1) == INTEGER_CST) 2147 return build_real_from_int_cst (type, arg1); 2148 else if (TREE_CODE (arg1) == REAL_CST) 2149 return fold_convert_const_real_from_real (type, arg1); 2150 else if (TREE_CODE (arg1) == FIXED_CST) 2151 return fold_convert_const_real_from_fixed (type, arg1); 2152 } 2153 else if (TREE_CODE (type) == FIXED_POINT_TYPE) 2154 { 2155 if (TREE_CODE (arg1) == FIXED_CST) 2156 return fold_convert_const_fixed_from_fixed (type, arg1); 2157 else if (TREE_CODE (arg1) == INTEGER_CST) 2158 return fold_convert_const_fixed_from_int (type, arg1); 2159 else if (TREE_CODE (arg1) == REAL_CST) 2160 return fold_convert_const_fixed_from_real (type, arg1); 2161 } 2162 return NULL_TREE; 2163} 2164 2165/* Construct a vector of zero elements of vector type TYPE. */ 2166 2167static tree 2168build_zero_vector (tree type) 2169{ 2170 tree t; 2171 2172 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 2173 return build_vector_from_val (type, t); 2174} 2175 2176/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. 
*/ 2177 2178bool 2179fold_convertible_p (const_tree type, const_tree arg) 2180{ 2181 tree orig = TREE_TYPE (arg); 2182 2183 if (type == orig) 2184 return true; 2185 2186 if (TREE_CODE (arg) == ERROR_MARK 2187 || TREE_CODE (type) == ERROR_MARK 2188 || TREE_CODE (orig) == ERROR_MARK) 2189 return false; 2190 2191 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) 2192 return true; 2193 2194 switch (TREE_CODE (type)) 2195 { 2196 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2197 case POINTER_TYPE: case REFERENCE_TYPE: 2198 case OFFSET_TYPE: 2199 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2200 || TREE_CODE (orig) == OFFSET_TYPE) 2201 return true; 2202 return (TREE_CODE (orig) == VECTOR_TYPE 2203 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2204 2205 case REAL_TYPE: 2206 case FIXED_POINT_TYPE: 2207 case COMPLEX_TYPE: 2208 case VECTOR_TYPE: 2209 case VOID_TYPE: 2210 return TREE_CODE (type) == TREE_CODE (orig); 2211 2212 default: 2213 return false; 2214 } 2215} 2216 2217/* Convert expression ARG to type TYPE. Used by the middle-end for 2218 simple conversions in preference to calling the front-end's convert. */ 2219 2220tree 2221fold_convert_loc (location_t loc, tree type, tree arg) 2222{ 2223 tree orig = TREE_TYPE (arg); 2224 tree tem; 2225 2226 if (type == orig) 2227 return arg; 2228 2229 if (TREE_CODE (arg) == ERROR_MARK 2230 || TREE_CODE (type) == ERROR_MARK 2231 || TREE_CODE (orig) == ERROR_MARK) 2232 return error_mark_node; 2233 2234 switch (TREE_CODE (type)) 2235 { 2236 case POINTER_TYPE: 2237 case REFERENCE_TYPE: 2238 /* Handle conversions between pointers to different address spaces. */ 2239 if (POINTER_TYPE_P (orig) 2240 && (TYPE_ADDR_SPACE (TREE_TYPE (type)) 2241 != TYPE_ADDR_SPACE (TREE_TYPE (orig)))) 2242 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg); 2243 /* fall through */ 2244 2245 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2246 case OFFSET_TYPE: 2247 if (TREE_CODE (arg) == INTEGER_CST) 2248 { 2249 tem = fold_convert_const (NOP_EXPR, type, arg); 2250 if (tem != NULL_TREE) 2251 return tem; 2252 } 2253 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2254 || TREE_CODE (orig) == OFFSET_TYPE) 2255 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2256 if (TREE_CODE (orig) == COMPLEX_TYPE) 2257 return fold_convert_loc (loc, type, 2258 fold_build1_loc (loc, REALPART_EXPR, 2259 TREE_TYPE (orig), arg)); 2260 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE 2261 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2262 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2263 2264 case REAL_TYPE: 2265 if (TREE_CODE (arg) == INTEGER_CST) 2266 { 2267 tem = fold_convert_const (FLOAT_EXPR, type, arg); 2268 if (tem != NULL_TREE) 2269 return tem; 2270 } 2271 else if (TREE_CODE (arg) == REAL_CST) 2272 { 2273 tem = fold_convert_const (NOP_EXPR, type, arg); 2274 if (tem != NULL_TREE) 2275 return tem; 2276 } 2277 else if (TREE_CODE (arg) == FIXED_CST) 2278 { 2279 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg); 2280 if (tem != NULL_TREE) 2281 return tem; 2282 } 2283 2284 switch (TREE_CODE (orig)) 2285 { 2286 case INTEGER_TYPE: 2287 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 2288 case POINTER_TYPE: case REFERENCE_TYPE: 2289 return fold_build1_loc (loc, FLOAT_EXPR, type, arg); 2290 2291 case REAL_TYPE: 2292 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2293 2294 case FIXED_POINT_TYPE: 2295 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); 2296 2297 case COMPLEX_TYPE: 2298 tem = fold_build1_loc (loc, 
REALPART_EXPR, TREE_TYPE (orig), arg); 2299 return fold_convert_loc (loc, type, tem); 2300 2301 default: 2302 gcc_unreachable (); 2303 } 2304 2305 case FIXED_POINT_TYPE: 2306 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST 2307 || TREE_CODE (arg) == REAL_CST) 2308 { 2309 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg); 2310 if (tem != NULL_TREE) 2311 goto fold_convert_exit; 2312 } 2313 2314 switch (TREE_CODE (orig)) 2315 { 2316 case FIXED_POINT_TYPE: 2317 case INTEGER_TYPE: 2318 case ENUMERAL_TYPE: 2319 case BOOLEAN_TYPE: 2320 case REAL_TYPE: 2321 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); 2322 2323 case COMPLEX_TYPE: 2324 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 2325 return fold_convert_loc (loc, type, tem); 2326 2327 default: 2328 gcc_unreachable (); 2329 } 2330 2331 case COMPLEX_TYPE: 2332 switch (TREE_CODE (orig)) 2333 { 2334 case INTEGER_TYPE: 2335 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 2336 case POINTER_TYPE: case REFERENCE_TYPE: 2337 case REAL_TYPE: 2338 case FIXED_POINT_TYPE: 2339 return fold_build2_loc (loc, COMPLEX_EXPR, type, 2340 fold_convert_loc (loc, TREE_TYPE (type), arg), 2341 fold_convert_loc (loc, TREE_TYPE (type), 2342 integer_zero_node)); 2343 case COMPLEX_TYPE: 2344 { 2345 tree rpart, ipart; 2346 2347 if (TREE_CODE (arg) == COMPLEX_EXPR) 2348 { 2349 rpart = fold_convert_loc (loc, TREE_TYPE (type), 2350 TREE_OPERAND (arg, 0)); 2351 ipart = fold_convert_loc (loc, TREE_TYPE (type), 2352 TREE_OPERAND (arg, 1)); 2353 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); 2354 } 2355 2356 arg = save_expr (arg); 2357 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 2358 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg); 2359 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart); 2360 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart); 2361 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); 2362 } 2363 2364 default: 2365 gcc_unreachable (); 2366 } 2367 2368 case VECTOR_TYPE: 2369 if (integer_zerop (arg)) 2370 return build_zero_vector (type); 2371 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2372 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2373 || TREE_CODE (orig) == VECTOR_TYPE); 2374 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg); 2375 2376 case VOID_TYPE: 2377 tem = fold_ignored_result (arg); 2378 return fold_build1_loc (loc, NOP_EXPR, type, tem); 2379 2380 default: 2381 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) 2382 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2383 gcc_unreachable (); 2384 } 2385 fold_convert_exit: 2386 protected_set_expr_location_unshare (tem, loc); 2387 return tem; 2388} 2389 2390/* Return false if expr can be assumed not to be an lvalue, true 2391 otherwise. */ 2392 2393static bool 2394maybe_lvalue_p (const_tree x) 2395{ 2396 /* We only need to wrap lvalue tree codes. 
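     Other known tree codes are assumed never to be lvalues, while unknown
     front-end codes are treated conservatively by the default case below.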
*/ 2397 switch (TREE_CODE (x)) 2398 { 2399 case VAR_DECL: 2400 case PARM_DECL: 2401 case RESULT_DECL: 2402 case LABEL_DECL: 2403 case FUNCTION_DECL: 2404 case SSA_NAME: 2405 2406 case COMPONENT_REF: 2407 case MEM_REF: 2408 case INDIRECT_REF: 2409 case ARRAY_REF: 2410 case ARRAY_RANGE_REF: 2411 case BIT_FIELD_REF: 2412 case OBJ_TYPE_REF: 2413 2414 case REALPART_EXPR: 2415 case IMAGPART_EXPR: 2416 case PREINCREMENT_EXPR: 2417 case PREDECREMENT_EXPR: 2418 case SAVE_EXPR: 2419 case TRY_CATCH_EXPR: 2420 case WITH_CLEANUP_EXPR: 2421 case COMPOUND_EXPR: 2422 case MODIFY_EXPR: 2423 case TARGET_EXPR: 2424 case COND_EXPR: 2425 case BIND_EXPR: 2426 break; 2427 2428 default: 2429 /* Assume the worst for front-end tree codes. */ 2430 if ((int)TREE_CODE (x) >= NUM_TREE_CODES) 2431 break; 2432 return false; 2433 } 2434 2435 return true; 2436} 2437 2438/* Return an expr equal to X but certainly not valid as an lvalue. */ 2439 2440tree 2441non_lvalue_loc (location_t loc, tree x) 2442{ 2443 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to 2444 us. */ 2445 if (in_gimple_form) 2446 return x; 2447 2448 if (! maybe_lvalue_p (x)) 2449 return x; 2450 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x); 2451} 2452 2453/* When pedantic, return an expr equal to X but certainly not valid as a 2454 pedantic lvalue. Otherwise, return X. */ 2455 2456static tree 2457pedantic_non_lvalue_loc (location_t loc, tree x) 2458{ 2459 return protected_set_expr_location_unshare (x, loc); 2460} 2461 2462/* Given a tree comparison code, return the code that is the logical inverse. 2463 It is generally not safe to do this for floating-point comparisons, except 2464 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return 2465 ERROR_MARK in this case. */ 2466 2467enum tree_code 2468invert_tree_comparison (enum tree_code code, bool honor_nans) 2469{ 2470 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR 2471 && code != ORDERED_EXPR && code != UNORDERED_EXPR) 2472 return ERROR_MARK; 2473 2474 switch (code) 2475 { 2476 case EQ_EXPR: 2477 return NE_EXPR; 2478 case NE_EXPR: 2479 return EQ_EXPR; 2480 case GT_EXPR: 2481 return honor_nans ? UNLE_EXPR : LE_EXPR; 2482 case GE_EXPR: 2483 return honor_nans ? UNLT_EXPR : LT_EXPR; 2484 case LT_EXPR: 2485 return honor_nans ? UNGE_EXPR : GE_EXPR; 2486 case LE_EXPR: 2487 return honor_nans ? UNGT_EXPR : GT_EXPR; 2488 case LTGT_EXPR: 2489 return UNEQ_EXPR; 2490 case UNEQ_EXPR: 2491 return LTGT_EXPR; 2492 case UNGT_EXPR: 2493 return LE_EXPR; 2494 case UNGE_EXPR: 2495 return LT_EXPR; 2496 case UNLT_EXPR: 2497 return GE_EXPR; 2498 case UNLE_EXPR: 2499 return GT_EXPR; 2500 case ORDERED_EXPR: 2501 return UNORDERED_EXPR; 2502 case UNORDERED_EXPR: 2503 return ORDERED_EXPR; 2504 default: 2505 gcc_unreachable (); 2506 } 2507} 2508 2509/* Similar, but return the comparison that results if the operands are 2510 swapped. This is safe for floating-point. 
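   For example, a < b becomes b > a and a <= b becomes b >= a, while
   symmetric codes such as EQ_EXPR, NE_EXPR and UNORDERED_EXPR are
   returned unchanged.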
*/ 2511 2512enum tree_code 2513swap_tree_comparison (enum tree_code code) 2514{ 2515 switch (code) 2516 { 2517 case EQ_EXPR: 2518 case NE_EXPR: 2519 case ORDERED_EXPR: 2520 case UNORDERED_EXPR: 2521 case LTGT_EXPR: 2522 case UNEQ_EXPR: 2523 return code; 2524 case GT_EXPR: 2525 return LT_EXPR; 2526 case GE_EXPR: 2527 return LE_EXPR; 2528 case LT_EXPR: 2529 return GT_EXPR; 2530 case LE_EXPR: 2531 return GE_EXPR; 2532 case UNGT_EXPR: 2533 return UNLT_EXPR; 2534 case UNGE_EXPR: 2535 return UNLE_EXPR; 2536 case UNLT_EXPR: 2537 return UNGT_EXPR; 2538 case UNLE_EXPR: 2539 return UNGE_EXPR; 2540 default: 2541 gcc_unreachable (); 2542 } 2543} 2544 2545 2546/* Convert a comparison tree code from an enum tree_code representation 2547 into a compcode bit-based encoding. This function is the inverse of 2548 compcode_to_comparison. */ 2549 2550static enum comparison_code 2551comparison_to_compcode (enum tree_code code) 2552{ 2553 switch (code) 2554 { 2555 case LT_EXPR: 2556 return COMPCODE_LT; 2557 case EQ_EXPR: 2558 return COMPCODE_EQ; 2559 case LE_EXPR: 2560 return COMPCODE_LE; 2561 case GT_EXPR: 2562 return COMPCODE_GT; 2563 case NE_EXPR: 2564 return COMPCODE_NE; 2565 case GE_EXPR: 2566 return COMPCODE_GE; 2567 case ORDERED_EXPR: 2568 return COMPCODE_ORD; 2569 case UNORDERED_EXPR: 2570 return COMPCODE_UNORD; 2571 case UNLT_EXPR: 2572 return COMPCODE_UNLT; 2573 case UNEQ_EXPR: 2574 return COMPCODE_UNEQ; 2575 case UNLE_EXPR: 2576 return COMPCODE_UNLE; 2577 case UNGT_EXPR: 2578 return COMPCODE_UNGT; 2579 case LTGT_EXPR: 2580 return COMPCODE_LTGT; 2581 case UNGE_EXPR: 2582 return COMPCODE_UNGE; 2583 default: 2584 gcc_unreachable (); 2585 } 2586} 2587 2588/* Convert a compcode bit-based encoding of a comparison operator back 2589 to GCC's enum tree_code representation. This function is the 2590 inverse of comparison_to_compcode. */ 2591 2592static enum tree_code 2593compcode_to_comparison (enum comparison_code code) 2594{ 2595 switch (code) 2596 { 2597 case COMPCODE_LT: 2598 return LT_EXPR; 2599 case COMPCODE_EQ: 2600 return EQ_EXPR; 2601 case COMPCODE_LE: 2602 return LE_EXPR; 2603 case COMPCODE_GT: 2604 return GT_EXPR; 2605 case COMPCODE_NE: 2606 return NE_EXPR; 2607 case COMPCODE_GE: 2608 return GE_EXPR; 2609 case COMPCODE_ORD: 2610 return ORDERED_EXPR; 2611 case COMPCODE_UNORD: 2612 return UNORDERED_EXPR; 2613 case COMPCODE_UNLT: 2614 return UNLT_EXPR; 2615 case COMPCODE_UNEQ: 2616 return UNEQ_EXPR; 2617 case COMPCODE_UNLE: 2618 return UNLE_EXPR; 2619 case COMPCODE_UNGT: 2620 return UNGT_EXPR; 2621 case COMPCODE_LTGT: 2622 return LTGT_EXPR; 2623 case COMPCODE_UNGE: 2624 return UNGE_EXPR; 2625 default: 2626 gcc_unreachable (); 2627 } 2628} 2629 2630/* Return a tree for the comparison which is the combination of 2631 doing the AND or OR (depending on CODE) of the two operations LCODE 2632 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account 2633 the possibility of trapping if the mode has NaNs, and return NULL_TREE 2634 if this makes the transformation invalid. 
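   For example, ANDing a < b with a == b leaves no true bits in the
   combined comparison code (COMPCODE_FALSE), so the result folds to a
   constant false node, while ORing them yields a <= b.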
*/ 2635 2636tree 2637combine_comparisons (location_t loc, 2638 enum tree_code code, enum tree_code lcode, 2639 enum tree_code rcode, tree truth_type, 2640 tree ll_arg, tree lr_arg) 2641{ 2642 bool honor_nans = HONOR_NANS (ll_arg); 2643 enum comparison_code lcompcode = comparison_to_compcode (lcode); 2644 enum comparison_code rcompcode = comparison_to_compcode (rcode); 2645 int compcode; 2646 2647 switch (code) 2648 { 2649 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR: 2650 compcode = lcompcode & rcompcode; 2651 break; 2652 2653 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR: 2654 compcode = lcompcode | rcompcode; 2655 break; 2656 2657 default: 2658 return NULL_TREE; 2659 } 2660 2661 if (!honor_nans) 2662 { 2663 /* Eliminate unordered comparisons, as well as LTGT and ORD 2664 which are not used unless the mode has NaNs. */ 2665 compcode &= ~COMPCODE_UNORD; 2666 if (compcode == COMPCODE_LTGT) 2667 compcode = COMPCODE_NE; 2668 else if (compcode == COMPCODE_ORD) 2669 compcode = COMPCODE_TRUE; 2670 } 2671 else if (flag_trapping_math) 2672 { 2673 /* Check that the original operation and the optimized ones will trap 2674 under the same condition. */ 2675 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0 2676 && (lcompcode != COMPCODE_EQ) 2677 && (lcompcode != COMPCODE_ORD); 2678 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0 2679 && (rcompcode != COMPCODE_EQ) 2680 && (rcompcode != COMPCODE_ORD); 2681 bool trap = (compcode & COMPCODE_UNORD) == 0 2682 && (compcode != COMPCODE_EQ) 2683 && (compcode != COMPCODE_ORD); 2684 2685 /* In a short-circuited boolean expression the LHS might be 2686 such that the RHS, if evaluated, will never trap. For 2687 example, in ORD (x, y) && (x < y), we evaluate the RHS only 2688 if neither x nor y is NaN. (This is a mixed blessing: for 2689 example, the expression above will never trap, hence 2690 optimizing it to x < y would be invalid). */ 2691 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD)) 2692 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD))) 2693 rtrap = false; 2694 2695 /* If the comparison was short-circuited, and only the RHS 2696 trapped, we may now generate a spurious trap. */ 2697 if (rtrap && !ltrap 2698 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 2699 return NULL_TREE; 2700 2701 /* If we changed the conditions that cause a trap, we lose. */ 2702 if ((ltrap || rtrap) != trap) 2703 return NULL_TREE; 2704 } 2705 2706 if (compcode == COMPCODE_TRUE) 2707 return constant_boolean_node (true, truth_type); 2708 else if (compcode == COMPCODE_FALSE) 2709 return constant_boolean_node (false, truth_type); 2710 else 2711 { 2712 enum tree_code tcode; 2713 2714 tcode = compcode_to_comparison ((enum comparison_code) compcode); 2715 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg); 2716 } 2717} 2718 2719/* Return nonzero if two operands (typically of the same tree node) 2720 are necessarily equal. If either argument has side-effects this 2721 function returns zero. FLAGS modifies behavior as follows: 2722 2723 If OEP_ONLY_CONST is set, only return nonzero for constants. 2724 This function tests whether the operands are indistinguishable; 2725 it does not test whether they are equal using C's == operation. 2726 The distinction is important for IEEE floating point, because 2727 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and 2728 (2) two NaNs may be indistinguishable, but NaN!=NaN. 
2729 2730 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself 2731 even though it may hold multiple values during a function. 2732 This is because a GCC tree node guarantees that nothing else is 2733 executed between the evaluation of its "operands" (which may often 2734 be evaluated in arbitrary order). Hence if the operands themselves 2735 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the 2736 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST 2737 unset means assuming isochronic (or instantaneous) tree equivalence. 2738 Unless comparing arbitrary expression trees, such as from different 2739 statements, this flag can usually be left unset. 2740 2741 If OEP_PURE_SAME is set, then pure functions with identical arguments 2742 are considered the same. It is used when the caller has other ways 2743 to ensure that global memory is unchanged in between. */ 2744 2745int 2746operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) 2747{ 2748 /* If either is ERROR_MARK, they aren't equal. */ 2749 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK 2750 || TREE_TYPE (arg0) == error_mark_node 2751 || TREE_TYPE (arg1) == error_mark_node) 2752 return 0; 2753 2754 /* Similar, if either does not have a type (like a released SSA name), 2755 they aren't equal. */ 2756 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1)) 2757 return 0; 2758 2759 /* Check equality of integer constants before bailing out due to 2760 precision differences. */ 2761 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 2762 return tree_int_cst_equal (arg0, arg1); 2763 2764 /* If both types don't have the same signedness, then we can't consider 2765 them equal. We must check this before the STRIP_NOPS calls 2766 because they may change the signedness of the arguments. As pointers 2767 strictly don't have a signedness, require either two pointers or 2768 two non-pointers as well. */ 2769 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)) 2770 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1))) 2771 return 0; 2772 2773 /* We cannot consider pointers to different address space equal. */ 2774 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1)) 2775 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))) 2776 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))) 2777 return 0; 2778 2779 /* If both types don't have the same precision, then it is not safe 2780 to strip NOPs. */ 2781 if (element_precision (TREE_TYPE (arg0)) 2782 != element_precision (TREE_TYPE (arg1))) 2783 return 0; 2784 2785 STRIP_NOPS (arg0); 2786 STRIP_NOPS (arg1); 2787 2788 /* In case both args are comparisons but with different comparison 2789 code, try to swap the comparison operands of one arg to produce 2790 a match and compare that variant. */ 2791 if (TREE_CODE (arg0) != TREE_CODE (arg1) 2792 && COMPARISON_CLASS_P (arg0) 2793 && COMPARISON_CLASS_P (arg1)) 2794 { 2795 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1)); 2796 2797 if (TREE_CODE (arg0) == swap_code) 2798 return operand_equal_p (TREE_OPERAND (arg0, 0), 2799 TREE_OPERAND (arg1, 1), flags) 2800 && operand_equal_p (TREE_OPERAND (arg0, 1), 2801 TREE_OPERAND (arg1, 0), flags); 2802 } 2803 2804 if (TREE_CODE (arg0) != TREE_CODE (arg1) 2805 /* NOP_EXPR and CONVERT_EXPR are considered equal. */ 2806 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))) 2807 return 0; 2808 2809 /* This is needed for conversions and for COMPONENT_REF. 
2810 Might as well play it safe and always test this. */ 2811 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK 2812 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK 2813 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))) 2814 return 0; 2815 2816 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal. 2817 We don't care about side effects in that case because the SAVE_EXPR 2818 takes care of that for us. In all other cases, two expressions are 2819 equal if they have no side effects. If we have two identical 2820 expressions with side effects that should be treated the same due 2821 to the only side effects being identical SAVE_EXPR's, that will 2822 be detected in the recursive calls below. 2823 If we are taking an invariant address of two identical objects 2824 they are necessarily equal as well. */ 2825 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST) 2826 && (TREE_CODE (arg0) == SAVE_EXPR 2827 || (flags & OEP_CONSTANT_ADDRESS_OF) 2828 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1)))) 2829 return 1; 2830 2831 /* Next handle constant cases, those for which we can return 1 even 2832 if ONLY_CONST is set. */ 2833 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)) 2834 switch (TREE_CODE (arg0)) 2835 { 2836 case INTEGER_CST: 2837 return tree_int_cst_equal (arg0, arg1); 2838 2839 case FIXED_CST: 2840 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0), 2841 TREE_FIXED_CST (arg1)); 2842 2843 case REAL_CST: 2844 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0), 2845 TREE_REAL_CST (arg1))) 2846 return 1; 2847 2848 2849 if (!HONOR_SIGNED_ZEROS (arg0)) 2850 { 2851 /* If we do not distinguish between signed and unsigned zero, 2852 consider them equal. */ 2853 if (real_zerop (arg0) && real_zerop (arg1)) 2854 return 1; 2855 } 2856 return 0; 2857 2858 case VECTOR_CST: 2859 { 2860 unsigned i; 2861 2862 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1)) 2863 return 0; 2864 2865 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i) 2866 { 2867 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i), 2868 VECTOR_CST_ELT (arg1, i), flags)) 2869 return 0; 2870 } 2871 return 1; 2872 } 2873 2874 case COMPLEX_CST: 2875 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1), 2876 flags) 2877 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1), 2878 flags)); 2879 2880 case STRING_CST: 2881 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1) 2882 && ! memcmp (TREE_STRING_POINTER (arg0), 2883 TREE_STRING_POINTER (arg1), 2884 TREE_STRING_LENGTH (arg0))); 2885 2886 case ADDR_EXPR: 2887 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 2888 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1) 2889 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0); 2890 default: 2891 break; 2892 } 2893 2894 if (flags & OEP_ONLY_CONST) 2895 return 0; 2896 2897/* Define macros to test an operand from arg0 and arg1 for equality and a 2898 variant that allows null and views null as being different from any 2899 non-null value. In the latter case, if either is null, the both 2900 must be; otherwise, do the normal comparison. */ 2901#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \ 2902 TREE_OPERAND (arg1, N), flags) 2903 2904#define OP_SAME_WITH_NULL(N) \ 2905 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \ 2906 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N)) 2907 2908 switch (TREE_CODE_CLASS (TREE_CODE (arg0))) 2909 { 2910 case tcc_unary: 2911 /* Two conversions are equal only if signedness and modes match. 
*/ 2912 switch (TREE_CODE (arg0)) 2913 { 2914 CASE_CONVERT: 2915 case FIX_TRUNC_EXPR: 2916 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) 2917 != TYPE_UNSIGNED (TREE_TYPE (arg1))) 2918 return 0; 2919 break; 2920 default: 2921 break; 2922 } 2923 2924 return OP_SAME (0); 2925 2926 2927 case tcc_comparison: 2928 case tcc_binary: 2929 if (OP_SAME (0) && OP_SAME (1)) 2930 return 1; 2931 2932 /* For commutative ops, allow the other order. */ 2933 return (commutative_tree_code (TREE_CODE (arg0)) 2934 && operand_equal_p (TREE_OPERAND (arg0, 0), 2935 TREE_OPERAND (arg1, 1), flags) 2936 && operand_equal_p (TREE_OPERAND (arg0, 1), 2937 TREE_OPERAND (arg1, 0), flags)); 2938 2939 case tcc_reference: 2940 /* If either of the pointer (or reference) expressions we are 2941 dereferencing contain a side effect, these cannot be equal, 2942 but their addresses can be. */ 2943 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0 2944 && (TREE_SIDE_EFFECTS (arg0) 2945 || TREE_SIDE_EFFECTS (arg1))) 2946 return 0; 2947 2948 switch (TREE_CODE (arg0)) 2949 { 2950 case INDIRECT_REF: 2951 if (!(flags & OEP_ADDRESS_OF) 2952 && (TYPE_ALIGN (TREE_TYPE (arg0)) 2953 != TYPE_ALIGN (TREE_TYPE (arg1)))) 2954 return 0; 2955 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF); 2956 return OP_SAME (0); 2957 2958 case REALPART_EXPR: 2959 case IMAGPART_EXPR: 2960 return OP_SAME (0); 2961 2962 case TARGET_MEM_REF: 2963 case MEM_REF: 2964 /* Require equal access sizes, and similar pointer types. 2965 We can have incomplete types for array references of 2966 variable-sized arrays from the Fortran frontend 2967 though. Also verify the types are compatible. */ 2968 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1)) 2969 || (TYPE_SIZE (TREE_TYPE (arg0)) 2970 && TYPE_SIZE (TREE_TYPE (arg1)) 2971 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)), 2972 TYPE_SIZE (TREE_TYPE (arg1)), flags))) 2973 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)) 2974 && ((flags & OEP_ADDRESS_OF) 2975 || (alias_ptr_types_compatible_p 2976 (TREE_TYPE (TREE_OPERAND (arg0, 1)), 2977 TREE_TYPE (TREE_OPERAND (arg1, 1))) 2978 && (MR_DEPENDENCE_CLIQUE (arg0) 2979 == MR_DEPENDENCE_CLIQUE (arg1)) 2980 && (MR_DEPENDENCE_BASE (arg0) 2981 == MR_DEPENDENCE_BASE (arg1)) 2982 && (TYPE_ALIGN (TREE_TYPE (arg0)) 2983 == TYPE_ALIGN (TREE_TYPE (arg1))))))) 2984 return 0; 2985 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF); 2986 return (OP_SAME (0) && OP_SAME (1) 2987 /* TARGET_MEM_REF require equal extra operands. */ 2988 && (TREE_CODE (arg0) != TARGET_MEM_REF 2989 || (OP_SAME_WITH_NULL (2) 2990 && OP_SAME_WITH_NULL (3) 2991 && OP_SAME_WITH_NULL (4)))); 2992 2993 case ARRAY_REF: 2994 case ARRAY_RANGE_REF: 2995 /* Operands 2 and 3 may be null. 2996 Compare the array index by value if it is constant first as we 2997 may have different types but same value here. */ 2998 if (!OP_SAME (0)) 2999 return 0; 3000 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF); 3001 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1), 3002 TREE_OPERAND (arg1, 1)) 3003 || OP_SAME (1)) 3004 && OP_SAME_WITH_NULL (2) 3005 && OP_SAME_WITH_NULL (3)); 3006 3007 case COMPONENT_REF: 3008 /* Handle operand 2 the same as for ARRAY_REF. Operand 0 3009 may be NULL when we're called to compare MEM_EXPRs. 
*/ 3010 if (!OP_SAME_WITH_NULL (0) 3011 || !OP_SAME (1)) 3012 return 0; 3013 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF); 3014 return OP_SAME_WITH_NULL (2); 3015 3016 case BIT_FIELD_REF: 3017 if (!OP_SAME (0)) 3018 return 0; 3019 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF); 3020 return OP_SAME (1) && OP_SAME (2); 3021 3022 default: 3023 return 0; 3024 } 3025 3026 case tcc_expression: 3027 switch (TREE_CODE (arg0)) 3028 { 3029 case ADDR_EXPR: 3030 return operand_equal_p (TREE_OPERAND (arg0, 0), 3031 TREE_OPERAND (arg1, 0), 3032 flags | OEP_ADDRESS_OF); 3033 3034 case TRUTH_NOT_EXPR: 3035 return OP_SAME (0); 3036 3037 case TRUTH_ANDIF_EXPR: 3038 case TRUTH_ORIF_EXPR: 3039 return OP_SAME (0) && OP_SAME (1); 3040 3041 case FMA_EXPR: 3042 case WIDEN_MULT_PLUS_EXPR: 3043 case WIDEN_MULT_MINUS_EXPR: 3044 if (!OP_SAME (2)) 3045 return 0; 3046 /* The multiplcation operands are commutative. */ 3047 /* FALLTHRU */ 3048 3049 case TRUTH_AND_EXPR: 3050 case TRUTH_OR_EXPR: 3051 case TRUTH_XOR_EXPR: 3052 if (OP_SAME (0) && OP_SAME (1)) 3053 return 1; 3054 3055 /* Otherwise take into account this is a commutative operation. */ 3056 return (operand_equal_p (TREE_OPERAND (arg0, 0), 3057 TREE_OPERAND (arg1, 1), flags) 3058 && operand_equal_p (TREE_OPERAND (arg0, 1), 3059 TREE_OPERAND (arg1, 0), flags)); 3060 3061 case COND_EXPR: 3062 case VEC_COND_EXPR: 3063 case DOT_PROD_EXPR: 3064 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); 3065 3066 default: 3067 return 0; 3068 } 3069 3070 case tcc_vl_exp: 3071 switch (TREE_CODE (arg0)) 3072 { 3073 case CALL_EXPR: 3074 /* If the CALL_EXPRs call different functions, then they 3075 clearly can not be equal. */ 3076 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1), 3077 flags)) 3078 return 0; 3079 3080 { 3081 unsigned int cef = call_expr_flags (arg0); 3082 if (flags & OEP_PURE_SAME) 3083 cef &= ECF_CONST | ECF_PURE; 3084 else 3085 cef &= ECF_CONST; 3086 if (!cef) 3087 return 0; 3088 } 3089 3090 /* Now see if all the arguments are the same. */ 3091 { 3092 const_call_expr_arg_iterator iter0, iter1; 3093 const_tree a0, a1; 3094 for (a0 = first_const_call_expr_arg (arg0, &iter0), 3095 a1 = first_const_call_expr_arg (arg1, &iter1); 3096 a0 && a1; 3097 a0 = next_const_call_expr_arg (&iter0), 3098 a1 = next_const_call_expr_arg (&iter1)) 3099 if (! operand_equal_p (a0, a1, flags)) 3100 return 0; 3101 3102 /* If we get here and both argument lists are exhausted 3103 then the CALL_EXPRs are equal. */ 3104 return ! (a0 || a1); 3105 } 3106 default: 3107 return 0; 3108 } 3109 3110 case tcc_declaration: 3111 /* Consider __builtin_sqrt equal to sqrt. */ 3112 return (TREE_CODE (arg0) == FUNCTION_DECL 3113 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1) 3114 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1) 3115 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1)); 3116 3117 default: 3118 return 0; 3119 } 3120 3121#undef OP_SAME 3122#undef OP_SAME_WITH_NULL 3123} 3124 3125/* Similar to operand_equal_p, but see if ARG0 might have been made by 3126 shorten_compare from ARG1 when ARG1 was being compared with OTHER. 3127 3128 When in doubt, return 0. */ 3129 3130static int 3131operand_equal_for_comparison_p (tree arg0, tree arg1, tree other) 3132{ 3133 int unsignedp1, unsignedpo; 3134 tree primarg0, primarg1, primother; 3135 unsigned int correct_width; 3136 3137 if (operand_equal_p (arg0, arg1, 0)) 3138 return 1; 3139 3140 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 3141 || ! 
      INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.
For 3242 the second operand, we know *CVAL1 can't be zero. It must be that 3243 one side of the comparison is each of the values; test for the 3244 case where this isn't true by failing if the two operands 3245 are the same. */ 3246 3247 if (operand_equal_p (TREE_OPERAND (arg, 0), 3248 TREE_OPERAND (arg, 1), 0)) 3249 return 0; 3250 3251 if (*cval1 == 0) 3252 *cval1 = TREE_OPERAND (arg, 0); 3253 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0)) 3254 ; 3255 else if (*cval2 == 0) 3256 *cval2 = TREE_OPERAND (arg, 0); 3257 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0)) 3258 ; 3259 else 3260 return 0; 3261 3262 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0)) 3263 ; 3264 else if (*cval2 == 0) 3265 *cval2 = TREE_OPERAND (arg, 1); 3266 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0)) 3267 ; 3268 else 3269 return 0; 3270 3271 return 1; 3272 3273 default: 3274 return 0; 3275 } 3276} 3277 3278/* ARG is a tree that is known to contain just arithmetic operations and 3279 comparisons. Evaluate the operations in the tree substituting NEW0 for 3280 any occurrence of OLD0 as an operand of a comparison and likewise for 3281 NEW1 and OLD1. */ 3282 3283static tree 3284eval_subst (location_t loc, tree arg, tree old0, tree new0, 3285 tree old1, tree new1) 3286{ 3287 tree type = TREE_TYPE (arg); 3288 enum tree_code code = TREE_CODE (arg); 3289 enum tree_code_class tclass = TREE_CODE_CLASS (code); 3290 3291 /* We can handle some of the tcc_expression cases here. */ 3292 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR) 3293 tclass = tcc_unary; 3294 else if (tclass == tcc_expression 3295 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 3296 tclass = tcc_binary; 3297 3298 switch (tclass) 3299 { 3300 case tcc_unary: 3301 return fold_build1_loc (loc, code, type, 3302 eval_subst (loc, TREE_OPERAND (arg, 0), 3303 old0, new0, old1, new1)); 3304 3305 case tcc_binary: 3306 return fold_build2_loc (loc, code, type, 3307 eval_subst (loc, TREE_OPERAND (arg, 0), 3308 old0, new0, old1, new1), 3309 eval_subst (loc, TREE_OPERAND (arg, 1), 3310 old0, new0, old1, new1)); 3311 3312 case tcc_expression: 3313 switch (code) 3314 { 3315 case SAVE_EXPR: 3316 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, 3317 old1, new1); 3318 3319 case COMPOUND_EXPR: 3320 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0, 3321 old1, new1); 3322 3323 case COND_EXPR: 3324 return fold_build3_loc (loc, code, type, 3325 eval_subst (loc, TREE_OPERAND (arg, 0), 3326 old0, new0, old1, new1), 3327 eval_subst (loc, TREE_OPERAND (arg, 1), 3328 old0, new0, old1, new1), 3329 eval_subst (loc, TREE_OPERAND (arg, 2), 3330 old0, new0, old1, new1)); 3331 default: 3332 break; 3333 } 3334 /* Fall through - ??? */ 3335 3336 case tcc_comparison: 3337 { 3338 tree arg0 = TREE_OPERAND (arg, 0); 3339 tree arg1 = TREE_OPERAND (arg, 1); 3340 3341 /* We need to check both for exact equality and tree equality. The 3342 former will be true if the operand has a side-effect. In that 3343 case, we know the operand occurred exactly once. 
*/ 3344 3345 if (arg0 == old0 || operand_equal_p (arg0, old0, 0)) 3346 arg0 = new0; 3347 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0)) 3348 arg0 = new1; 3349 3350 if (arg1 == old0 || operand_equal_p (arg1, old0, 0)) 3351 arg1 = new0; 3352 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0)) 3353 arg1 = new1; 3354 3355 return fold_build2_loc (loc, code, type, arg0, arg1); 3356 } 3357 3358 default: 3359 return arg; 3360 } 3361} 3362 3363/* Return a tree for the case when the result of an expression is RESULT 3364 converted to TYPE and OMITTED was previously an operand of the expression 3365 but is now not needed (e.g., we folded OMITTED * 0). 3366 3367 If OMITTED has side effects, we must evaluate it. Otherwise, just do 3368 the conversion of RESULT to TYPE. */ 3369 3370tree 3371omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted) 3372{ 3373 tree t = fold_convert_loc (loc, type, result); 3374 3375 /* If the resulting operand is an empty statement, just return the omitted 3376 statement casted to void. */ 3377 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) 3378 return build1_loc (loc, NOP_EXPR, void_type_node, 3379 fold_ignored_result (omitted)); 3380 3381 if (TREE_SIDE_EFFECTS (omitted)) 3382 return build2_loc (loc, COMPOUND_EXPR, type, 3383 fold_ignored_result (omitted), t); 3384 3385 return non_lvalue_loc (loc, t); 3386} 3387 3388/* Return a tree for the case when the result of an expression is RESULT 3389 converted to TYPE and OMITTED1 and OMITTED2 were previously operands 3390 of the expression but are now not needed. 3391 3392 If OMITTED1 or OMITTED2 has side effects, they must be evaluated. 3393 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is 3394 evaluated before OMITTED2. Otherwise, if neither has side effects, 3395 just do the conversion of RESULT to TYPE. */ 3396 3397tree 3398omit_two_operands_loc (location_t loc, tree type, tree result, 3399 tree omitted1, tree omitted2) 3400{ 3401 tree t = fold_convert_loc (loc, type, result); 3402 3403 if (TREE_SIDE_EFFECTS (omitted2)) 3404 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t); 3405 if (TREE_SIDE_EFFECTS (omitted1)) 3406 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t); 3407 3408 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t; 3409} 3410 3411 3412/* Return a simplified tree node for the truth-negation of ARG. This 3413 never alters ARG itself. We assume that ARG is an operation that 3414 returns a truth value (0 or 1). 3415 3416 FIXME: one would think we would fold the result, but it causes 3417 problems with the dominator optimizer. */ 3418 3419static tree 3420fold_truth_not_expr (location_t loc, tree arg) 3421{ 3422 tree type = TREE_TYPE (arg); 3423 enum tree_code code = TREE_CODE (arg); 3424 location_t loc1, loc2; 3425 3426 /* If this is a comparison, we can simply invert it, except for 3427 floating-point non-equality comparisons, in which case we just 3428 enclose a TRUTH_NOT_EXPR around what we have. 
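     For example, !(a < b) becomes a >= b when NaNs need not be honored,
     and a UNGE b when they must be (unless -ftrapping-math makes the
     inversion unsafe, in which case we give up here).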
*/ 3429 3430 if (TREE_CODE_CLASS (code) == tcc_comparison) 3431 { 3432 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0)); 3433 if (FLOAT_TYPE_P (op_type) 3434 && flag_trapping_math 3435 && code != ORDERED_EXPR && code != UNORDERED_EXPR 3436 && code != NE_EXPR && code != EQ_EXPR) 3437 return NULL_TREE; 3438 3439 code = invert_tree_comparison (code, HONOR_NANS (op_type)); 3440 if (code == ERROR_MARK) 3441 return NULL_TREE; 3442 3443 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0), 3444 TREE_OPERAND (arg, 1)); 3445 } 3446 3447 switch (code) 3448 { 3449 case INTEGER_CST: 3450 return constant_boolean_node (integer_zerop (arg), type); 3451 3452 case TRUTH_AND_EXPR: 3453 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3454 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3455 return build2_loc (loc, TRUTH_OR_EXPR, type, 3456 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3457 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3458 3459 case TRUTH_OR_EXPR: 3460 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3461 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3462 return build2_loc (loc, TRUTH_AND_EXPR, type, 3463 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3464 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3465 3466 case TRUTH_XOR_EXPR: 3467 /* Here we can invert either operand. We invert the first operand 3468 unless the second operand is a TRUTH_NOT_EXPR in which case our 3469 result is the XOR of the first operand with the inside of the 3470 negation of the second operand. */ 3471 3472 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR) 3473 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0), 3474 TREE_OPERAND (TREE_OPERAND (arg, 1), 0)); 3475 else 3476 return build2_loc (loc, TRUTH_XOR_EXPR, type, 3477 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)), 3478 TREE_OPERAND (arg, 1)); 3479 3480 case TRUTH_ANDIF_EXPR: 3481 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3482 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3483 return build2_loc (loc, TRUTH_ORIF_EXPR, type, 3484 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3485 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3486 3487 case TRUTH_ORIF_EXPR: 3488 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3489 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3490 return build2_loc (loc, TRUTH_ANDIF_EXPR, type, 3491 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3492 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3493 3494 case TRUTH_NOT_EXPR: 3495 return TREE_OPERAND (arg, 0); 3496 3497 case COND_EXPR: 3498 { 3499 tree arg1 = TREE_OPERAND (arg, 1); 3500 tree arg2 = TREE_OPERAND (arg, 2); 3501 3502 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3503 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc); 3504 3505 /* A COND_EXPR may have a throw as one operand, which 3506 then has void type. Just leave void operands 3507 as they are. */ 3508 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0), 3509 VOID_TYPE_P (TREE_TYPE (arg1)) 3510 ? arg1 : invert_truthvalue_loc (loc1, arg1), 3511 VOID_TYPE_P (TREE_TYPE (arg2)) 3512 ? 
arg2 : invert_truthvalue_loc (loc2, arg2)); 3513 } 3514 3515 case COMPOUND_EXPR: 3516 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3517 return build2_loc (loc, COMPOUND_EXPR, type, 3518 TREE_OPERAND (arg, 0), 3519 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1))); 3520 3521 case NON_LVALUE_EXPR: 3522 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3523 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)); 3524 3525 CASE_CONVERT: 3526 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) 3527 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); 3528 3529 /* ... fall through ... */ 3530 3531 case FLOAT_EXPR: 3532 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3533 return build1_loc (loc, TREE_CODE (arg), type, 3534 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); 3535 3536 case BIT_AND_EXPR: 3537 if (!integer_onep (TREE_OPERAND (arg, 1))) 3538 return NULL_TREE; 3539 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0)); 3540 3541 case SAVE_EXPR: 3542 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); 3543 3544 case CLEANUP_POINT_EXPR: 3545 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3546 return build1_loc (loc, CLEANUP_POINT_EXPR, type, 3547 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); 3548 3549 default: 3550 return NULL_TREE; 3551 } 3552} 3553 3554/* Fold the truth-negation of ARG. This never alters ARG itself. We 3555 assume that ARG is an operation that returns a truth value (0 or 1 3556 for scalars, 0 or -1 for vectors). Return the folded expression if 3557 folding is successful. Otherwise, return NULL_TREE. */ 3558 3559static tree 3560fold_invert_truthvalue (location_t loc, tree arg) 3561{ 3562 tree type = TREE_TYPE (arg); 3563 return fold_unary_loc (loc, VECTOR_TYPE_P (type) 3564 ? BIT_NOT_EXPR 3565 : TRUTH_NOT_EXPR, 3566 type, arg); 3567} 3568 3569/* Return a simplified tree node for the truth-negation of ARG. This 3570 never alters ARG itself. We assume that ARG is an operation that 3571 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */ 3572 3573tree 3574invert_truthvalue_loc (location_t loc, tree arg) 3575{ 3576 if (TREE_CODE (arg) == ERROR_MARK) 3577 return arg; 3578 3579 tree type = TREE_TYPE (arg); 3580 return fold_build1_loc (loc, VECTOR_TYPE_P (type) 3581 ? BIT_NOT_EXPR 3582 : TRUTH_NOT_EXPR, 3583 type, arg); 3584} 3585 3586/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both 3587 operands are another bit-wise operation with a common input. If so, 3588 distribute the bit operations to save an operation and possibly two if 3589 constants are involved. For example, convert 3590 (A | B) & (A | C) into A | (B & C) 3591 Further simplification will occur if B and C are constants. 3592 3593 If this optimization cannot be done, 0 will be returned. 
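   The same is done with the roles of the operators exchanged, e.g.
       (A & B) | (A & C) into A & (B | C)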
*/ 3594 3595static tree 3596distribute_bit_expr (location_t loc, enum tree_code code, tree type, 3597 tree arg0, tree arg1) 3598{ 3599 tree common; 3600 tree left, right; 3601 3602 if (TREE_CODE (arg0) != TREE_CODE (arg1) 3603 || TREE_CODE (arg0) == code 3604 || (TREE_CODE (arg0) != BIT_AND_EXPR 3605 && TREE_CODE (arg0) != BIT_IOR_EXPR)) 3606 return 0; 3607 3608 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)) 3609 { 3610 common = TREE_OPERAND (arg0, 0); 3611 left = TREE_OPERAND (arg0, 1); 3612 right = TREE_OPERAND (arg1, 1); 3613 } 3614 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0)) 3615 { 3616 common = TREE_OPERAND (arg0, 0); 3617 left = TREE_OPERAND (arg0, 1); 3618 right = TREE_OPERAND (arg1, 0); 3619 } 3620 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0)) 3621 { 3622 common = TREE_OPERAND (arg0, 1); 3623 left = TREE_OPERAND (arg0, 0); 3624 right = TREE_OPERAND (arg1, 1); 3625 } 3626 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0)) 3627 { 3628 common = TREE_OPERAND (arg0, 1); 3629 left = TREE_OPERAND (arg0, 0); 3630 right = TREE_OPERAND (arg1, 0); 3631 } 3632 else 3633 return 0; 3634 3635 common = fold_convert_loc (loc, type, common); 3636 left = fold_convert_loc (loc, type, left); 3637 right = fold_convert_loc (loc, type, right); 3638 return fold_build2_loc (loc, TREE_CODE (arg0), type, common, 3639 fold_build2_loc (loc, code, type, left, right)); 3640} 3641 3642/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation 3643 with code CODE. This optimization is unsafe. */ 3644static tree 3645distribute_real_division (location_t loc, enum tree_code code, tree type, 3646 tree arg0, tree arg1) 3647{ 3648 bool mul0 = TREE_CODE (arg0) == MULT_EXPR; 3649 bool mul1 = TREE_CODE (arg1) == MULT_EXPR; 3650 3651 /* (A / C) +- (B / C) -> (A +- B) / C. */ 3652 if (mul0 == mul1 3653 && operand_equal_p (TREE_OPERAND (arg0, 1), 3654 TREE_OPERAND (arg1, 1), 0)) 3655 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type, 3656 fold_build2_loc (loc, code, type, 3657 TREE_OPERAND (arg0, 0), 3658 TREE_OPERAND (arg1, 0)), 3659 TREE_OPERAND (arg0, 1)); 3660 3661 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */ 3662 if (operand_equal_p (TREE_OPERAND (arg0, 0), 3663 TREE_OPERAND (arg1, 0), 0) 3664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 3665 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 3666 { 3667 REAL_VALUE_TYPE r0, r1; 3668 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1)); 3669 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1)); 3670 if (!mul0) 3671 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0); 3672 if (!mul1) 3673 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1); 3674 real_arithmetic (&r0, code, &r0, &r1); 3675 return fold_build2_loc (loc, MULT_EXPR, type, 3676 TREE_OPERAND (arg0, 0), 3677 build_real (type, r0)); 3678 } 3679 3680 return NULL_TREE; 3681} 3682 3683/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER 3684 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. 
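   If BITPOS is zero and the reference would cover all of an integral or
   pointer INNER, a simple conversion of INNER to TYPE is returned instead
   of a BIT_FIELD_REF.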
   */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && tree_fits_shwi_p (size)
          && tree_to_shwi (size) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.
*/ 3766 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, 3767 &runsignedp, &rvolatilep, false); 3768 3769 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize 3770 || lunsignedp != runsignedp || offset != 0 3771 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep) 3772 return 0; 3773 } 3774 3775 /* See if we can find a mode to refer to this field. We should be able to, 3776 but fail if we can't. */ 3777 nmode = get_best_mode (lbitsize, lbitpos, 0, 0, 3778 const_p ? TYPE_ALIGN (TREE_TYPE (linner)) 3779 : MIN (TYPE_ALIGN (TREE_TYPE (linner)), 3780 TYPE_ALIGN (TREE_TYPE (rinner))), 3781 word_mode, false); 3782 if (nmode == VOIDmode) 3783 return 0; 3784 3785 /* Set signed and unsigned types of the precision of this mode for the 3786 shifts below. */ 3787 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1); 3788 3789 /* Compute the bit position and size for the new reference and our offset 3790 within it. If the new reference is the same size as the original, we 3791 won't optimize anything, so return zero. */ 3792 nbitsize = GET_MODE_BITSIZE (nmode); 3793 nbitpos = lbitpos & ~ (nbitsize - 1); 3794 lbitpos -= nbitpos; 3795 if (nbitsize == lbitsize) 3796 return 0; 3797 3798 if (BYTES_BIG_ENDIAN) 3799 lbitpos = nbitsize - lbitsize - lbitpos; 3800 3801 /* Make the mask to be used against the extracted field. */ 3802 mask = build_int_cst_type (unsigned_type, -1); 3803 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize)); 3804 mask = const_binop (RSHIFT_EXPR, mask, 3805 size_int (nbitsize - lbitsize - lbitpos)); 3806 3807 if (! const_p) 3808 /* If not comparing with constant, just rework the comparison 3809 and return. */ 3810 return fold_build2_loc (loc, code, compare_type, 3811 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 3812 make_bit_field_ref (loc, linner, 3813 unsigned_type, 3814 nbitsize, nbitpos, 3815 1), 3816 mask), 3817 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 3818 make_bit_field_ref (loc, rinner, 3819 unsigned_type, 3820 nbitsize, nbitpos, 3821 1), 3822 mask)); 3823 3824 /* Otherwise, we are handling the constant case. See if the constant is too 3825 big for the field. Warn and return a tree of for 0 (false) if so. We do 3826 this not only for its own sake, but to avoid having to test for this 3827 error case below. If we didn't, we might generate wrong code. 3828 3829 For unsigned fields, the constant shifted right by the field length should 3830 be all zero. For signed fields, the high-order bits should agree with 3831 the sign bit. */ 3832 3833 if (lunsignedp) 3834 { 3835 if (wi::lrshift (rhs, lbitsize) != 0) 3836 { 3837 warning (0, "comparison is always %d due to width of bit-field", 3838 code == NE_EXPR); 3839 return constant_boolean_node (code == NE_EXPR, compare_type); 3840 } 3841 } 3842 else 3843 { 3844 wide_int tem = wi::arshift (rhs, lbitsize - 1); 3845 if (tem != 0 && tem != -1) 3846 { 3847 warning (0, "comparison is always %d due to width of bit-field", 3848 code == NE_EXPR); 3849 return constant_boolean_node (code == NE_EXPR, compare_type); 3850 } 3851 } 3852 3853 /* Single-bit compares should always be against zero. */ 3854 if (lbitsize == 1 && ! integer_zerop (rhs)) 3855 { 3856 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR; 3857 rhs = build_int_cst (type, 0); 3858 } 3859 3860 /* Make a new bitfield reference, shift the constant over the 3861 appropriate number of bits and mask it with the computed mask 3862 (in case this was a signed field). If we changed it, make a new one. 
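For example (ignoring the big-endian position adjustment), a 3-bit field at bit position 2 compared for equality with 5 becomes, roughly, (chunk & 0x1c) == (5 << 2), trading the extraction shift for a single AND.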
*/ 3863 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1); 3864 3865 rhs = const_binop (BIT_AND_EXPR, 3866 const_binop (LSHIFT_EXPR, 3867 fold_convert_loc (loc, unsigned_type, rhs), 3868 size_int (lbitpos)), 3869 mask); 3870 3871 lhs = build2_loc (loc, code, compare_type, 3872 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs); 3873 return lhs; 3874} 3875 3876/* Subroutine for fold_truth_andor_1: decode a field reference. 3877 3878 If EXP is a comparison reference, we return the innermost reference. 3879 3880 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is 3881 set to the starting bit number. 3882 3883 If the innermost field can be completely contained in a mode-sized 3884 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode. 3885 3886 *PVOLATILEP is set to 1 if the any expression encountered is volatile; 3887 otherwise it is not changed. 3888 3889 *PUNSIGNEDP is set to the signedness of the field. 3890 3891 *PMASK is set to the mask used. This is either contained in a 3892 BIT_AND_EXPR or derived from the width of the field. 3893 3894 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. 3895 3896 Return 0 if this is not a component reference or is one that we can't 3897 do anything with. */ 3898 3899static tree 3900decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, 3901 HOST_WIDE_INT *pbitpos, machine_mode *pmode, 3902 int *punsignedp, int *pvolatilep, 3903 tree *pmask, tree *pand_mask) 3904{ 3905 tree outer_type = 0; 3906 tree and_mask = 0; 3907 tree mask, inner, offset; 3908 tree unsigned_type; 3909 unsigned int precision; 3910 3911 /* All the optimizations using this function assume integer fields. 3912 There are problems with FP fields since the type_for_size call 3913 below can fail for, e.g., XFmode. */ 3914 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp))) 3915 return 0; 3916 3917 /* We are interested in the bare arrangement of bits, so strip everything 3918 that doesn't affect the machine mode. However, record the type of the 3919 outermost expression if it may matter below. */ 3920 if (CONVERT_EXPR_P (exp) 3921 || TREE_CODE (exp) == NON_LVALUE_EXPR) 3922 outer_type = TREE_TYPE (exp); 3923 STRIP_NOPS (exp); 3924 3925 if (TREE_CODE (exp) == BIT_AND_EXPR) 3926 { 3927 and_mask = TREE_OPERAND (exp, 1); 3928 exp = TREE_OPERAND (exp, 0); 3929 STRIP_NOPS (exp); STRIP_NOPS (and_mask); 3930 if (TREE_CODE (and_mask) != INTEGER_CST) 3931 return 0; 3932 } 3933 3934 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, 3935 punsignedp, pvolatilep, false); 3936 if ((inner == exp && and_mask == 0) 3937 || *pbitsize < 0 || offset != 0 3938 || TREE_CODE (inner) == PLACEHOLDER_EXPR) 3939 return 0; 3940 3941 /* If the number of bits in the reference is the same as the bitsize of 3942 the outer type, then the outer type gives the signedness. Otherwise 3943 (in case of a small bitfield) the signedness is unchanged. */ 3944 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type)) 3945 *punsignedp = TYPE_UNSIGNED (outer_type); 3946 3947 /* Compute the mask to access the bitfield. */ 3948 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1); 3949 precision = TYPE_PRECISION (unsigned_type); 3950 3951 mask = build_int_cst_type (unsigned_type, -1); 3952 3953 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize)); 3954 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize)); 3955 3956 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. 
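For example, if the reference was (x & 0x0f) and the field itself is 8 bits wide, the basic 8-bit mask 0xff is narrowed to 0x0f here.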
*/ 3957 if (and_mask != 0) 3958 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 3959 fold_convert_loc (loc, unsigned_type, and_mask), mask); 3960 3961 *pmask = mask; 3962 *pand_mask = and_mask; 3963 return inner; 3964} 3965 3966/* Return nonzero if MASK represents a mask of SIZE ones in the low-order 3967 bit positions and MASK is SIGNED. */ 3968 3969static int 3970all_ones_mask_p (const_tree mask, unsigned int size) 3971{ 3972 tree type = TREE_TYPE (mask); 3973 unsigned int precision = TYPE_PRECISION (type); 3974 3975 /* If this function returns true when the type of the mask is 3976 UNSIGNED, then there will be errors. In particular see 3977 gcc.c-torture/execute/990326-1.c. There does not appear to be 3978 any documentation paper trail as to why this is so. But the pre 3979 wide-int worked with that restriction and it has been preserved 3980 here. */ 3981 if (size > precision || TYPE_SIGN (type) == UNSIGNED) 3982 return false; 3983 3984 return wi::mask (size, false, precision) == mask; 3985} 3986 3987/* Subroutine for fold: determine if VAL is the INTEGER_CONST that 3988 represents the sign bit of EXP's type. If EXP represents a sign 3989 or zero extension, also test VAL against the unextended type. 3990 The return value is the (sub)expression whose sign bit is VAL, 3991 or NULL_TREE otherwise. */ 3992 3993tree 3994sign_bit_p (tree exp, const_tree val) 3995{ 3996 int width; 3997 tree t; 3998 3999 /* Tree EXP must have an integral type. */ 4000 t = TREE_TYPE (exp); 4001 if (! INTEGRAL_TYPE_P (t)) 4002 return NULL_TREE; 4003 4004 /* Tree VAL must be an integer constant. */ 4005 if (TREE_CODE (val) != INTEGER_CST 4006 || TREE_OVERFLOW (val)) 4007 return NULL_TREE; 4008 4009 width = TYPE_PRECISION (t); 4010 if (wi::only_sign_bit_p (val, width)) 4011 return exp; 4012 4013 /* Handle extension from a narrower type. */ 4014 if (TREE_CODE (exp) == NOP_EXPR 4015 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width) 4016 return sign_bit_p (TREE_OPERAND (exp, 0), val); 4017 4018 return NULL_TREE; 4019} 4020 4021/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough 4022 to be evaluated unconditionally. */ 4023 4024static int 4025simple_operand_p (const_tree exp) 4026{ 4027 /* Strip any conversions that don't change the machine mode. */ 4028 STRIP_NOPS (exp); 4029 4030 return (CONSTANT_CLASS_P (exp) 4031 || TREE_CODE (exp) == SSA_NAME 4032 || (DECL_P (exp) 4033 && ! TREE_ADDRESSABLE (exp) 4034 && ! TREE_THIS_VOLATILE (exp) 4035 && ! DECL_NONLOCAL (exp) 4036 /* Don't regard global variables as simple. They may be 4037 allocated in ways unknown to the compiler (shared memory, 4038 #pragma weak, etc). */ 4039 && ! TREE_PUBLIC (exp) 4040 && ! DECL_EXTERNAL (exp) 4041 /* Weakrefs are not safe to be read, since they can be NULL. 4042 They are !TREE_PUBLIC && !DECL_EXTERNAL but still 4043 have DECL_WEAK flag set. */ 4044 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp)) 4045 /* Loading a static variable is unduly expensive, but global 4046 registers aren't expensive. */ 4047 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)))); 4048} 4049 4050/* Subroutine for fold_truth_andor: determine if an operand is simple enough 4051 to be evaluated unconditionally. 4052 I addition to simple_operand_p, we assume that comparisons, conversions, 4053 and logic-not operations are simple, if their operands are simple, too. 
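For example, !(a < b) counts as simple when A and B are simple (constants, SSA names, or suitable non-volatile local decls), while anything with side effects or that could trap does not.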
*/ 4054 4055static bool 4056simple_operand_p_2 (tree exp) 4057{ 4058 enum tree_code code; 4059 4060 if (TREE_SIDE_EFFECTS (exp) 4061 || tree_could_trap_p (exp)) 4062 return false; 4063 4064 while (CONVERT_EXPR_P (exp)) 4065 exp = TREE_OPERAND (exp, 0); 4066 4067 code = TREE_CODE (exp); 4068 4069 if (TREE_CODE_CLASS (code) == tcc_comparison) 4070 return (simple_operand_p (TREE_OPERAND (exp, 0)) 4071 && simple_operand_p (TREE_OPERAND (exp, 1))); 4072 4073 if (code == TRUTH_NOT_EXPR) 4074 return simple_operand_p_2 (TREE_OPERAND (exp, 0)); 4075 4076 return simple_operand_p (exp); 4077} 4078 4079 4080/* The following functions are subroutines to fold_range_test and allow it to 4081 try to change a logical combination of comparisons into a range test. 4082 4083 For example, both 4084 X == 2 || X == 3 || X == 4 || X == 5 4085 and 4086 X >= 2 && X <= 5 4087 are converted to 4088 (unsigned) (X - 2) <= 3 4089 4090 We describe each set of comparisons as being either inside or outside 4091 a range, using a variable named like IN_P, and then describe the 4092 range with a lower and upper bound. If one of the bounds is omitted, 4093 it represents either the highest or lowest value of the type. 4094 4095 In the comments below, we represent a range by two numbers in brackets 4096 preceded by a "+" to designate being inside that range, or a "-" to 4097 designate being outside that range, so the condition can be inverted by 4098 flipping the prefix. An omitted bound is represented by a "-". For 4099 example, "- [-, 10]" means being outside the range starting at the lowest 4100 possible value and ending at 10, in other words, being greater than 10. 4101 The range "+ [-, -]" is always true and hence the range "- [-, -]" is 4102 always false. 4103 4104 We set up things so that the missing bounds are handled in a consistent 4105 manner so neither a missing bound nor "true" and "false" need to be 4106 handled using a special case. */ 4107 4108/* Return the result of applying CODE to ARG0 and ARG1, but handle the case 4109 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P 4110 and UPPER1_P are nonzero if the respective argument is an upper bound 4111 and zero for a lower. TYPE, if nonzero, is the type of the result; it 4112 must be specified for a comparison. ARG1 will be converted to ARG0's 4113 type if both are specified. */ 4114 4115static tree 4116range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, 4117 tree arg1, int upper1_p) 4118{ 4119 tree tem; 4120 int result; 4121 int sgn0, sgn1; 4122 4123 /* If neither arg represents infinity, do the normal operation. 4124 Else, if not a comparison, return infinity. Else handle the special 4125 comparison rules. Note that most of the cases below won't occur, but 4126 are handled for consistency. */ 4127 4128 if (arg0 != 0 && arg1 != 0) 4129 { 4130 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0), 4131 arg0, fold_convert (TREE_TYPE (arg0), arg1)); 4132 STRIP_NOPS (tem); 4133 return TREE_CODE (tem) == INTEGER_CST ? tem : 0; 4134 } 4135 4136 if (TREE_CODE_CLASS (code) != tcc_comparison) 4137 return 0; 4138 4139 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0 4140 for neither. In real maths, we cannot assume open ended ranges are 4141 the same. But, this is computer arithmetic, where numbers are finite. 4142 We can therefore make the transformation of any unbounded range with 4143 the value Z, Z being greater than any representable number. This permits 4144 us to treat unbounded ranges as equal. 
*/ 4145 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1); 4146 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1); 4147 switch (code) 4148 { 4149 case EQ_EXPR: 4150 result = sgn0 == sgn1; 4151 break; 4152 case NE_EXPR: 4153 result = sgn0 != sgn1; 4154 break; 4155 case LT_EXPR: 4156 result = sgn0 < sgn1; 4157 break; 4158 case LE_EXPR: 4159 result = sgn0 <= sgn1; 4160 break; 4161 case GT_EXPR: 4162 result = sgn0 > sgn1; 4163 break; 4164 case GE_EXPR: 4165 result = sgn0 >= sgn1; 4166 break; 4167 default: 4168 gcc_unreachable (); 4169 } 4170 4171 return constant_boolean_node (result, type); 4172} 4173 4174/* Helper routine for make_range. Perform one step for it, return 4175 new expression if the loop should continue or NULL_TREE if it should 4176 stop. */ 4177 4178tree 4179make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1, 4180 tree exp_type, tree *p_low, tree *p_high, int *p_in_p, 4181 bool *strict_overflow_p) 4182{ 4183 tree arg0_type = TREE_TYPE (arg0); 4184 tree n_low, n_high, low = *p_low, high = *p_high; 4185 int in_p = *p_in_p, n_in_p; 4186 4187 switch (code) 4188 { 4189 case TRUTH_NOT_EXPR: 4190 /* We can only do something if the range is testing for zero. */ 4191 if (low == NULL_TREE || high == NULL_TREE 4192 || ! integer_zerop (low) || ! integer_zerop (high)) 4193 return NULL_TREE; 4194 *p_in_p = ! in_p; 4195 return arg0; 4196 4197 case EQ_EXPR: case NE_EXPR: 4198 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR: 4199 /* We can only do something if the range is testing for zero 4200 and if the second operand is an integer constant. Note that 4201 saying something is "in" the range we make is done by 4202 complementing IN_P since it will set in the initial case of 4203 being not equal to zero; "out" is leaving it alone. */ 4204 if (low == NULL_TREE || high == NULL_TREE 4205 || ! integer_zerop (low) || ! integer_zerop (high) 4206 || TREE_CODE (arg1) != INTEGER_CST) 4207 return NULL_TREE; 4208 4209 switch (code) 4210 { 4211 case NE_EXPR: /* - [c, c] */ 4212 low = high = arg1; 4213 break; 4214 case EQ_EXPR: /* + [c, c] */ 4215 in_p = ! in_p, low = high = arg1; 4216 break; 4217 case GT_EXPR: /* - [-, c] */ 4218 low = 0, high = arg1; 4219 break; 4220 case GE_EXPR: /* + [c, -] */ 4221 in_p = ! in_p, low = arg1, high = 0; 4222 break; 4223 case LT_EXPR: /* - [c, -] */ 4224 low = arg1, high = 0; 4225 break; 4226 case LE_EXPR: /* + [-, c] */ 4227 in_p = ! in_p, low = 0, high = arg1; 4228 break; 4229 default: 4230 gcc_unreachable (); 4231 } 4232 4233 /* If this is an unsigned comparison, we also know that EXP is 4234 greater than or equal to zero. We base the range tests we make 4235 on that fact, so we record it here so we can parse existing 4236 range tests. We test arg0_type since often the return type 4237 of, e.g. EQ_EXPR, is boolean. */ 4238 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0)) 4239 { 4240 if (! merge_ranges (&n_in_p, &n_low, &n_high, 4241 in_p, low, high, 1, 4242 build_int_cst (arg0_type, 0), 4243 NULL_TREE)) 4244 return NULL_TREE; 4245 4246 in_p = n_in_p, low = n_low, high = n_high; 4247 4248 /* If the high bound is missing, but we have a nonzero low 4249 bound, reverse the range so it goes from zero to the low bound 4250 minus 1. */ 4251 if (high == 0 && low && ! integer_zerop (low)) 4252 { 4253 in_p = ! 
in_p; 4254 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0, 4255 build_int_cst (TREE_TYPE (low), 1), 0); 4256 low = build_int_cst (arg0_type, 0); 4257 } 4258 } 4259 4260 *p_low = low; 4261 *p_high = high; 4262 *p_in_p = in_p; 4263 return arg0; 4264 4265 case NEGATE_EXPR: 4266 /* If flag_wrapv and ARG0_TYPE is signed, make sure 4267 low and high are non-NULL, then normalize will DTRT. */ 4268 if (!TYPE_UNSIGNED (arg0_type) 4269 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) 4270 { 4271 if (low == NULL_TREE) 4272 low = TYPE_MIN_VALUE (arg0_type); 4273 if (high == NULL_TREE) 4274 high = TYPE_MAX_VALUE (arg0_type); 4275 } 4276 4277 /* (-x) IN [a,b] -> x in [-b, -a] */ 4278 n_low = range_binop (MINUS_EXPR, exp_type, 4279 build_int_cst (exp_type, 0), 4280 0, high, 1); 4281 n_high = range_binop (MINUS_EXPR, exp_type, 4282 build_int_cst (exp_type, 0), 4283 0, low, 0); 4284 if (n_high != 0 && TREE_OVERFLOW (n_high)) 4285 return NULL_TREE; 4286 goto normalize; 4287 4288 case BIT_NOT_EXPR: 4289 /* ~ X -> -X - 1 */ 4290 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0), 4291 build_int_cst (exp_type, 1)); 4292 4293 case PLUS_EXPR: 4294 case MINUS_EXPR: 4295 if (TREE_CODE (arg1) != INTEGER_CST) 4296 return NULL_TREE; 4297 4298 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot 4299 move a constant to the other side. */ 4300 if (!TYPE_UNSIGNED (arg0_type) 4301 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) 4302 return NULL_TREE; 4303 4304 /* If EXP is signed, any overflow in the computation is undefined, 4305 so we don't worry about it so long as our computations on 4306 the bounds don't overflow. For unsigned, overflow is defined 4307 and this is exactly the right thing. */ 4308 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 4309 arg0_type, low, 0, arg1, 0); 4310 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 4311 arg0_type, high, 1, arg1, 0); 4312 if ((n_low != 0 && TREE_OVERFLOW (n_low)) 4313 || (n_high != 0 && TREE_OVERFLOW (n_high))) 4314 return NULL_TREE; 4315 4316 if (TYPE_OVERFLOW_UNDEFINED (arg0_type)) 4317 *strict_overflow_p = true; 4318 4319 normalize: 4320 /* Check for an unsigned range which has wrapped around the maximum 4321 value thus making n_high < n_low, and normalize it. */ 4322 if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) 4323 { 4324 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0, 4325 build_int_cst (TREE_TYPE (n_high), 1), 0); 4326 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0, 4327 build_int_cst (TREE_TYPE (n_low), 1), 0); 4328 4329 /* If the range is of the form +/- [ x+1, x ], we won't 4330 be able to normalize it. But then, it represents the 4331 whole range or the empty set, so make it 4332 +/- [ -, - ]. */ 4333 if (tree_int_cst_equal (n_low, low) 4334 && tree_int_cst_equal (n_high, high)) 4335 low = high = 0; 4336 else 4337 in_p = ! in_p; 4338 } 4339 else 4340 low = n_low, high = n_high; 4341 4342 *p_low = low; 4343 *p_high = high; 4344 *p_in_p = in_p; 4345 return arg0; 4346 4347 CASE_CONVERT: 4348 case NON_LVALUE_EXPR: 4349 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type)) 4350 return NULL_TREE; 4351 4352 if (! INTEGRAL_TYPE_P (arg0_type) 4353 || (low != 0 && ! int_fits_type_p (low, arg0_type)) 4354 || (high != 0 && ! 
int_fits_type_p (high, arg0_type))) 4355 return NULL_TREE; 4356 4357 n_low = low, n_high = high; 4358 4359 if (n_low != 0) 4360 n_low = fold_convert_loc (loc, arg0_type, n_low); 4361 4362 if (n_high != 0) 4363 n_high = fold_convert_loc (loc, arg0_type, n_high); 4364 4365 /* If we're converting arg0 from an unsigned type, to exp, 4366 a signed type, we will be doing the comparison as unsigned. 4367 The tests above have already verified that LOW and HIGH 4368 are both positive. 4369 4370 So we have to ensure that we will handle large unsigned 4371 values the same way that the current signed bounds treat 4372 negative values. */ 4373 4374 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type)) 4375 { 4376 tree high_positive; 4377 tree equiv_type; 4378 /* For fixed-point modes, we need to pass the saturating flag 4379 as the 2nd parameter. */ 4380 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type))) 4381 equiv_type 4382 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 4383 TYPE_SATURATING (arg0_type)); 4384 else 4385 equiv_type 4386 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1); 4387 4388 /* A range without an upper bound is, naturally, unbounded. 4389 Since convert would have cropped a very large value, use 4390 the max value for the destination type. */ 4391 high_positive 4392 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type) 4393 : TYPE_MAX_VALUE (arg0_type); 4394 4395 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) 4396 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type, 4397 fold_convert_loc (loc, arg0_type, 4398 high_positive), 4399 build_int_cst (arg0_type, 1)); 4400 4401 /* If the low bound is specified, "and" the range with the 4402 range for which the original unsigned value will be 4403 positive. */ 4404 if (low != 0) 4405 { 4406 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high, 4407 1, fold_convert_loc (loc, arg0_type, 4408 integer_zero_node), 4409 high_positive)) 4410 return NULL_TREE; 4411 4412 in_p = (n_in_p == in_p); 4413 } 4414 else 4415 { 4416 /* Otherwise, "or" the range with the range of the input 4417 that will be interpreted as negative. */ 4418 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high, 4419 1, fold_convert_loc (loc, arg0_type, 4420 integer_zero_node), 4421 high_positive)) 4422 return NULL_TREE; 4423 4424 in_p = (in_p != n_in_p); 4425 } 4426 } 4427 4428 *p_low = n_low; 4429 *p_high = n_high; 4430 *p_in_p = in_p; 4431 return arg0; 4432 4433 default: 4434 return NULL_TREE; 4435 } 4436} 4437 4438/* Given EXP, a logical expression, set the range it is testing into 4439 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression 4440 actually being tested. *PLOW and *PHIGH will be made of the same 4441 type as the returned expression. If EXP is not a comparison, we 4442 will most likely not be returning a useful value and range. Set 4443 *STRICT_OVERFLOW_P to true if the return value is only valid 4444 because signed overflow is undefined; otherwise, do not change 4445 *STRICT_OVERFLOW_P. */ 4446 4447tree 4448make_range (tree exp, int *pin_p, tree *plow, tree *phigh, 4449 bool *strict_overflow_p) 4450{ 4451 enum tree_code code; 4452 tree arg0, arg1 = NULL_TREE; 4453 tree exp_type, nexp; 4454 int in_p; 4455 tree low, high; 4456 location_t loc = EXPR_LOCATION (exp); 4457 4458 /* Start with simply saying "EXP != 0" and then look at the code of EXP 4459 and see if we can refine the range. Some of the cases below may not 4460 happen, but it doesn't seem worth worrying about this. 
We "continue" 4461 the outer loop when we've changed something; otherwise we "break" 4462 the switch, which will "break" the while. */ 4463 4464 in_p = 0; 4465 low = high = build_int_cst (TREE_TYPE (exp), 0); 4466 4467 while (1) 4468 { 4469 code = TREE_CODE (exp); 4470 exp_type = TREE_TYPE (exp); 4471 arg0 = NULL_TREE; 4472 4473 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) 4474 { 4475 if (TREE_OPERAND_LENGTH (exp) > 0) 4476 arg0 = TREE_OPERAND (exp, 0); 4477 if (TREE_CODE_CLASS (code) == tcc_binary 4478 || TREE_CODE_CLASS (code) == tcc_comparison 4479 || (TREE_CODE_CLASS (code) == tcc_expression 4480 && TREE_OPERAND_LENGTH (exp) > 1)) 4481 arg1 = TREE_OPERAND (exp, 1); 4482 } 4483 if (arg0 == NULL_TREE) 4484 break; 4485 4486 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low, 4487 &high, &in_p, strict_overflow_p); 4488 if (nexp == NULL_TREE) 4489 break; 4490 exp = nexp; 4491 } 4492 4493 /* If EXP is a constant, we can evaluate whether this is true or false. */ 4494 if (TREE_CODE (exp) == INTEGER_CST) 4495 { 4496 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node, 4497 exp, 0, low, 0)) 4498 && integer_onep (range_binop (LE_EXPR, integer_type_node, 4499 exp, 1, high, 1))); 4500 low = high = 0; 4501 exp = 0; 4502 } 4503 4504 *pin_p = in_p, *plow = low, *phigh = high; 4505 return exp; 4506} 4507 4508/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result 4509 type, TYPE, return an expression to test if EXP is in (or out of, depending 4510 on IN_P) the range. Return 0 if the test couldn't be created. */ 4511 4512tree 4513build_range_check (location_t loc, tree type, tree exp, int in_p, 4514 tree low, tree high) 4515{ 4516 tree etype = TREE_TYPE (exp), value; 4517 4518#ifdef HAVE_canonicalize_funcptr_for_compare 4519 /* Disable this optimization for function pointer expressions 4520 on targets that require function pointer canonicalization. */ 4521 if (HAVE_canonicalize_funcptr_for_compare 4522 && TREE_CODE (etype) == POINTER_TYPE 4523 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) 4524 return NULL_TREE; 4525#endif 4526 4527 if (! in_p) 4528 { 4529 value = build_range_check (loc, type, exp, 1, low, high); 4530 if (value != 0) 4531 return invert_truthvalue_loc (loc, value); 4532 4533 return 0; 4534 } 4535 4536 if (low == 0 && high == 0) 4537 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp); 4538 4539 if (low == 0) 4540 return fold_build2_loc (loc, LE_EXPR, type, exp, 4541 fold_convert_loc (loc, etype, high)); 4542 4543 if (high == 0) 4544 return fold_build2_loc (loc, GE_EXPR, type, exp, 4545 fold_convert_loc (loc, etype, low)); 4546 4547 if (operand_equal_p (low, high, 0)) 4548 return fold_build2_loc (loc, EQ_EXPR, type, exp, 4549 fold_convert_loc (loc, etype, low)); 4550 4551 if (integer_zerop (low)) 4552 { 4553 if (! TYPE_UNSIGNED (etype)) 4554 { 4555 etype = unsigned_type_for (etype); 4556 high = fold_convert_loc (loc, etype, high); 4557 exp = fold_convert_loc (loc, etype, exp); 4558 } 4559 return build_range_check (loc, type, exp, 1, 0, high); 4560 } 4561 4562 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. 
*/ 4563 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST) 4564 { 4565 int prec = TYPE_PRECISION (etype); 4566 4567 if (wi::mask (prec - 1, false, prec) == high) 4568 { 4569 if (TYPE_UNSIGNED (etype)) 4570 { 4571 tree signed_etype = signed_type_for (etype); 4572 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype)) 4573 etype 4574 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0); 4575 else 4576 etype = signed_etype; 4577 exp = fold_convert_loc (loc, etype, exp); 4578 } 4579 return fold_build2_loc (loc, GT_EXPR, type, exp, 4580 build_int_cst (etype, 0)); 4581 } 4582 } 4583 4584 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low). 4585 This requires wrap-around arithmetics for the type of the expression. 4586 First make sure that arithmetics in this type is valid, then make sure 4587 that it wraps around. */ 4588 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE) 4589 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 4590 TYPE_UNSIGNED (etype)); 4591 4592 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype)) 4593 { 4594 tree utype, minv, maxv; 4595 4596 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN 4597 for the type in question, as we rely on this here. */ 4598 utype = unsigned_type_for (etype); 4599 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype)); 4600 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1, 4601 build_int_cst (TREE_TYPE (maxv), 1), 1); 4602 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype)); 4603 4604 if (integer_zerop (range_binop (NE_EXPR, integer_type_node, 4605 minv, 1, maxv, 1))) 4606 etype = utype; 4607 else 4608 return 0; 4609 } 4610 4611 high = fold_convert_loc (loc, etype, high); 4612 low = fold_convert_loc (loc, etype, low); 4613 exp = fold_convert_loc (loc, etype, exp); 4614 4615 value = const_binop (MINUS_EXPR, high, low); 4616 4617 4618 if (POINTER_TYPE_P (etype)) 4619 { 4620 if (value != 0 && !TREE_OVERFLOW (value)) 4621 { 4622 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low); 4623 return build_range_check (loc, type, 4624 fold_build_pointer_plus_loc (loc, exp, low), 4625 1, build_int_cst (etype, 0), value); 4626 } 4627 return 0; 4628 } 4629 4630 if (value != 0 && !TREE_OVERFLOW (value)) 4631 return build_range_check (loc, type, 4632 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low), 4633 1, build_int_cst (etype, 0), value); 4634 4635 return 0; 4636} 4637 4638/* Return the predecessor of VAL in its type, handling the infinite case. */ 4639 4640static tree 4641range_predecessor (tree val) 4642{ 4643 tree type = TREE_TYPE (val); 4644 4645 if (INTEGRAL_TYPE_P (type) 4646 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0)) 4647 return 0; 4648 else 4649 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, 4650 build_int_cst (TREE_TYPE (val), 1), 0); 4651} 4652 4653/* Return the successor of VAL in its type, handling the infinite case. */ 4654 4655static tree 4656range_successor (tree val) 4657{ 4658 tree type = TREE_TYPE (val); 4659 4660 if (INTEGRAL_TYPE_P (type) 4661 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0)) 4662 return 0; 4663 else 4664 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, 4665 build_int_cst (TREE_TYPE (val), 1), 0); 4666} 4667 4668/* Given two ranges, see if we can merge them into one. Return 1 if we 4669 can, 0 if we can't. Set the output range into the specified parameters. 
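For example, merging + [2, 10] with + [5, 20] yields + [5, 10], while + [2, 4] merged with + [8, 9] yields the always-false range.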
*/ 4670 4671bool 4672merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, 4673 tree high0, int in1_p, tree low1, tree high1) 4674{ 4675 int no_overlap; 4676 int subset; 4677 int temp; 4678 tree tem; 4679 int in_p; 4680 tree low, high; 4681 int lowequal = ((low0 == 0 && low1 == 0) 4682 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 4683 low0, 0, low1, 0))); 4684 int highequal = ((high0 == 0 && high1 == 0) 4685 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 4686 high0, 1, high1, 1))); 4687 4688 /* Make range 0 be the range that starts first, or ends last if they 4689 start at the same value. Swap them if it isn't. */ 4690 if (integer_onep (range_binop (GT_EXPR, integer_type_node, 4691 low0, 0, low1, 0)) 4692 || (lowequal 4693 && integer_onep (range_binop (GT_EXPR, integer_type_node, 4694 high1, 1, high0, 1)))) 4695 { 4696 temp = in0_p, in0_p = in1_p, in1_p = temp; 4697 tem = low0, low0 = low1, low1 = tem; 4698 tem = high0, high0 = high1, high1 = tem; 4699 } 4700 4701 /* Now flag two cases, whether the ranges are disjoint or whether the 4702 second range is totally subsumed in the first. Note that the tests 4703 below are simplified by the ones above. */ 4704 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node, 4705 high0, 1, low1, 0)); 4706 subset = integer_onep (range_binop (LE_EXPR, integer_type_node, 4707 high1, 1, high0, 1)); 4708 4709 /* We now have four cases, depending on whether we are including or 4710 excluding the two ranges. */ 4711 if (in0_p && in1_p) 4712 { 4713 /* If they don't overlap, the result is false. If the second range 4714 is a subset it is the result. Otherwise, the range is from the start 4715 of the second to the end of the first. */ 4716 if (no_overlap) 4717 in_p = 0, low = high = 0; 4718 else if (subset) 4719 in_p = 1, low = low1, high = high1; 4720 else 4721 in_p = 1, low = low1, high = high0; 4722 } 4723 4724 else if (in0_p && ! in1_p) 4725 { 4726 /* If they don't overlap, the result is the first range. If they are 4727 equal, the result is false. If the second range is a subset of the 4728 first, and the ranges begin at the same place, we go from just after 4729 the end of the second range to the end of the first. If the second 4730 range is not a subset of the first, or if it is a subset and both 4731 ranges end at the same place, the range starts at the start of the 4732 first range and ends just before the second range. 4733 Otherwise, we can't describe this as a single range. */ 4734 if (no_overlap) 4735 in_p = 1, low = low0, high = high0; 4736 else if (lowequal && highequal) 4737 in_p = 0, low = high = 0; 4738 else if (subset && lowequal) 4739 { 4740 low = range_successor (high1); 4741 high = high0; 4742 in_p = 1; 4743 if (low == 0) 4744 { 4745 /* We are in the weird situation where high0 > high1 but 4746 high1 has no successor. Punt. */ 4747 return 0; 4748 } 4749 } 4750 else if (! subset || highequal) 4751 { 4752 low = low0; 4753 high = range_predecessor (low1); 4754 in_p = 1; 4755 if (high == 0) 4756 { 4757 /* low0 < low1 but low1 has no predecessor. Punt. */ 4758 return 0; 4759 } 4760 } 4761 else 4762 return 0; 4763 } 4764 4765 else if (! in0_p && in1_p) 4766 { 4767 /* If they don't overlap, the result is the second range. If the second 4768 is a subset of the first, the result is false. Otherwise, 4769 the range starts just after the first range and ends at the 4770 end of the second. 
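For example, - [0, 10] combined with + [5, 20] gives + [11, 20].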
*/ 4771 if (no_overlap) 4772 in_p = 1, low = low1, high = high1; 4773 else if (subset || highequal) 4774 in_p = 0, low = high = 0; 4775 else 4776 { 4777 low = range_successor (high0); 4778 high = high1; 4779 in_p = 1; 4780 if (low == 0) 4781 { 4782 /* high1 > high0 but high0 has no successor. Punt. */ 4783 return 0; 4784 } 4785 } 4786 } 4787 4788 else 4789 { 4790 /* The case where we are excluding both ranges. Here the complex case 4791 is if they don't overlap. In that case, the only time we have a 4792 range is if they are adjacent. If the second is a subset of the 4793 first, the result is the first. Otherwise, the range to exclude 4794 starts at the beginning of the first range and ends at the end of the 4795 second. */ 4796 if (no_overlap) 4797 { 4798 if (integer_onep (range_binop (EQ_EXPR, integer_type_node, 4799 range_successor (high0), 4800 1, low1, 0))) 4801 in_p = 0, low = low0, high = high1; 4802 else 4803 { 4804 /* Canonicalize - [min, x] into - [-, x]. */ 4805 if (low0 && TREE_CODE (low0) == INTEGER_CST) 4806 switch (TREE_CODE (TREE_TYPE (low0))) 4807 { 4808 case ENUMERAL_TYPE: 4809 if (TYPE_PRECISION (TREE_TYPE (low0)) 4810 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0)))) 4811 break; 4812 /* FALLTHROUGH */ 4813 case INTEGER_TYPE: 4814 if (tree_int_cst_equal (low0, 4815 TYPE_MIN_VALUE (TREE_TYPE (low0)))) 4816 low0 = 0; 4817 break; 4818 case POINTER_TYPE: 4819 if (TYPE_UNSIGNED (TREE_TYPE (low0)) 4820 && integer_zerop (low0)) 4821 low0 = 0; 4822 break; 4823 default: 4824 break; 4825 } 4826 4827 /* Canonicalize - [x, max] into - [x, -]. */ 4828 if (high1 && TREE_CODE (high1) == INTEGER_CST) 4829 switch (TREE_CODE (TREE_TYPE (high1))) 4830 { 4831 case ENUMERAL_TYPE: 4832 if (TYPE_PRECISION (TREE_TYPE (high1)) 4833 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1)))) 4834 break; 4835 /* FALLTHROUGH */ 4836 case INTEGER_TYPE: 4837 if (tree_int_cst_equal (high1, 4838 TYPE_MAX_VALUE (TREE_TYPE (high1)))) 4839 high1 = 0; 4840 break; 4841 case POINTER_TYPE: 4842 if (TYPE_UNSIGNED (TREE_TYPE (high1)) 4843 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE, 4844 high1, 1, 4845 build_int_cst (TREE_TYPE (high1), 1), 4846 1))) 4847 high1 = 0; 4848 break; 4849 default: 4850 break; 4851 } 4852 4853 /* The ranges might be also adjacent between the maximum and 4854 minimum values of the given type. For 4855 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y 4856 return + [x + 1, y - 1]. */ 4857 if (low0 == 0 && high1 == 0) 4858 { 4859 low = range_successor (high0); 4860 high = range_predecessor (low1); 4861 if (low == 0 || high == 0) 4862 return 0; 4863 4864 in_p = 1; 4865 } 4866 else 4867 return 0; 4868 } 4869 } 4870 else if (subset) 4871 in_p = 0, low = low0, high = high0; 4872 else 4873 in_p = 0, low = low0, high = high1; 4874 } 4875 4876 *pin_p = in_p, *plow = low, *phigh = high; 4877 return 1; 4878} 4879 4880 4881/* Subroutine of fold, looking inside expressions of the form 4882 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands 4883 of the COND_EXPR. This function is being used also to optimize 4884 A op B ? C : A, by reversing the comparison first. 4885 4886 Return a folded expression whose code is not a COND_EXPR 4887 anymore, or NULL_TREE if no folding opportunity is found. 
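Typical results are ABS_EXPR, MIN_EXPR or MAX_EXPR nodes; for instance A >= 0 ? A : -A becomes ABS_EXPR <A> and A < B ? A : B becomes MIN_EXPR <A, B>, subject to the signed-zero and NaN caveats spelled out below.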
*/ 4888 4889static tree 4890fold_cond_expr_with_comparison (location_t loc, tree type, 4891 tree arg0, tree arg1, tree arg2) 4892{ 4893 enum tree_code comp_code = TREE_CODE (arg0); 4894 tree arg00 = TREE_OPERAND (arg0, 0); 4895 tree arg01 = TREE_OPERAND (arg0, 1); 4896 tree arg1_type = TREE_TYPE (arg1); 4897 tree tem; 4898 4899 STRIP_NOPS (arg1); 4900 STRIP_NOPS (arg2); 4901 4902 /* If we have A op 0 ? A : -A, consider applying the following 4903 transformations: 4904 4905 A == 0? A : -A same as -A 4906 A != 0? A : -A same as A 4907 A >= 0? A : -A same as abs (A) 4908 A > 0? A : -A same as abs (A) 4909 A <= 0? A : -A same as -abs (A) 4910 A < 0? A : -A same as -abs (A) 4911 4912 None of these transformations work for modes with signed 4913 zeros. If A is +/-0, the first two transformations will 4914 change the sign of the result (from +0 to -0, or vice 4915 versa). The last four will fix the sign of the result, 4916 even though the original expressions could be positive or 4917 negative, depending on the sign of A. 4918 4919 Note that all these transformations are correct if A is 4920 NaN, since the two alternatives (A and -A) are also NaNs. */ 4921 if (!HONOR_SIGNED_ZEROS (element_mode (type)) 4922 && (FLOAT_TYPE_P (TREE_TYPE (arg01)) 4923 ? real_zerop (arg01) 4924 : integer_zerop (arg01)) 4925 && ((TREE_CODE (arg2) == NEGATE_EXPR 4926 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0)) 4927 /* In the case that A is of the form X-Y, '-A' (arg2) may 4928 have already been folded to Y-X, check for that. */ 4929 || (TREE_CODE (arg1) == MINUS_EXPR 4930 && TREE_CODE (arg2) == MINUS_EXPR 4931 && operand_equal_p (TREE_OPERAND (arg1, 0), 4932 TREE_OPERAND (arg2, 1), 0) 4933 && operand_equal_p (TREE_OPERAND (arg1, 1), 4934 TREE_OPERAND (arg2, 0), 0)))) 4935 switch (comp_code) 4936 { 4937 case EQ_EXPR: 4938 case UNEQ_EXPR: 4939 tem = fold_convert_loc (loc, arg1_type, arg1); 4940 return pedantic_non_lvalue_loc (loc, 4941 fold_convert_loc (loc, type, 4942 negate_expr (tem))); 4943 case NE_EXPR: 4944 case LTGT_EXPR: 4945 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 4946 case UNGE_EXPR: 4947 case UNGT_EXPR: 4948 if (flag_trapping_math) 4949 break; 4950 /* Fall through. */ 4951 case GE_EXPR: 4952 case GT_EXPR: 4953 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 4954 break; 4955 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); 4956 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 4957 case UNLE_EXPR: 4958 case UNLT_EXPR: 4959 if (flag_trapping_math) 4960 break; 4961 case LE_EXPR: 4962 case LT_EXPR: 4963 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 4964 break; 4965 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); 4966 return negate_expr (fold_convert_loc (loc, type, tem)); 4967 default: 4968 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 4969 break; 4970 } 4971 4972 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise 4973 A == 0 ? A : 0 is always 0 unless A is -0. Note that 4974 both transformations are correct when A is NaN: A != 0 4975 is then true, and A == 0 is false. */ 4976 4977 if (!HONOR_SIGNED_ZEROS (element_mode (type)) 4978 && integer_zerop (arg01) && integer_zerop (arg2)) 4979 { 4980 if (comp_code == NE_EXPR) 4981 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 4982 else if (comp_code == EQ_EXPR) 4983 return build_zero_cst (type); 4984 } 4985 4986 /* Try some transformations of A op B ? A : B. 4987 4988 A == B? A : B same as B 4989 A != B? A : B same as A 4990 A >= B? 
A : B same as max (A, B) 4991 A > B? A : B same as max (B, A) 4992 A <= B? A : B same as min (A, B) 4993 A < B? A : B same as min (B, A) 4994 4995 As above, these transformations don't work in the presence 4996 of signed zeros. For example, if A and B are zeros of 4997 opposite sign, the first two transformations will change 4998 the sign of the result. In the last four, the original 4999 expressions give different results for (A=+0, B=-0) and 5000 (A=-0, B=+0), but the transformed expressions do not. 5001 5002 The first two transformations are correct if either A or B 5003 is a NaN. In the first transformation, the condition will 5004 be false, and B will indeed be chosen. In the case of the 5005 second transformation, the condition A != B will be true, 5006 and A will be chosen. 5007 5008 The conversions to max() and min() are not correct if B is 5009 a number and A is not. The conditions in the original 5010 expressions will be false, so all four give B. The min() 5011 and max() versions would give a NaN instead. */ 5012 if (!HONOR_SIGNED_ZEROS (element_mode (type)) 5013 && operand_equal_for_comparison_p (arg01, arg2, arg00) 5014 /* Avoid these transformations if the COND_EXPR may be used 5015 as an lvalue in the C++ front-end. PR c++/19199. */ 5016 && (in_gimple_form 5017 || VECTOR_TYPE_P (type) 5018 || (! lang_GNU_CXX () 5019 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0) 5020 || ! maybe_lvalue_p (arg1) 5021 || ! maybe_lvalue_p (arg2))) 5022 { 5023 tree comp_op0 = arg00; 5024 tree comp_op1 = arg01; 5025 tree comp_type = TREE_TYPE (comp_op0); 5026 5027 /* Avoid adding NOP_EXPRs in case this is an lvalue. */ 5028 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type)) 5029 { 5030 comp_type = type; 5031 comp_op0 = arg1; 5032 comp_op1 = arg2; 5033 } 5034 5035 switch (comp_code) 5036 { 5037 case EQ_EXPR: 5038 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2)); 5039 case NE_EXPR: 5040 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 5041 case LE_EXPR: 5042 case LT_EXPR: 5043 case UNLE_EXPR: 5044 case UNLT_EXPR: 5045 /* In C++ a ?: expression can be an lvalue, so put the 5046 operand which will be used if they are equal first 5047 so that we can convert this back to the 5048 corresponding COND_EXPR. */ 5049 if (!HONOR_NANS (arg1)) 5050 { 5051 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); 5052 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); 5053 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR) 5054 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1) 5055 : fold_build2_loc (loc, MIN_EXPR, comp_type, 5056 comp_op1, comp_op0); 5057 return pedantic_non_lvalue_loc (loc, 5058 fold_convert_loc (loc, type, tem)); 5059 } 5060 break; 5061 case GE_EXPR: 5062 case GT_EXPR: 5063 case UNGE_EXPR: 5064 case UNGT_EXPR: 5065 if (!HONOR_NANS (arg1)) 5066 { 5067 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); 5068 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); 5069 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR) 5070 ? 
fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1) 5071 : fold_build2_loc (loc, MAX_EXPR, comp_type, 5072 comp_op1, comp_op0); 5073 return pedantic_non_lvalue_loc (loc, 5074 fold_convert_loc (loc, type, tem)); 5075 } 5076 break; 5077 case UNEQ_EXPR: 5078 if (!HONOR_NANS (arg1)) 5079 return pedantic_non_lvalue_loc (loc, 5080 fold_convert_loc (loc, type, arg2)); 5081 break; 5082 case LTGT_EXPR: 5083 if (!HONOR_NANS (arg1)) 5084 return pedantic_non_lvalue_loc (loc, 5085 fold_convert_loc (loc, type, arg1)); 5086 break; 5087 default: 5088 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 5089 break; 5090 } 5091 } 5092 5093 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers, 5094 we might still be able to simplify this. For example, 5095 if C1 is one less or one more than C2, this might have started 5096 out as a MIN or MAX and been transformed by this function. 5097 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */ 5098 5099 if (INTEGRAL_TYPE_P (type) 5100 && TREE_CODE (arg01) == INTEGER_CST 5101 && TREE_CODE (arg2) == INTEGER_CST) 5102 switch (comp_code) 5103 { 5104 case EQ_EXPR: 5105 if (TREE_CODE (arg1) == INTEGER_CST) 5106 break; 5107 /* We can replace A with C1 in this case. */ 5108 arg1 = fold_convert_loc (loc, type, arg01); 5109 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2); 5110 5111 case LT_EXPR: 5112 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for 5113 MIN_EXPR, to preserve the signedness of the comparison. */ 5114 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 5115 OEP_ONLY_CONST) 5116 && operand_equal_p (arg01, 5117 const_binop (PLUS_EXPR, arg2, 5118 build_int_cst (type, 1)), 5119 OEP_ONLY_CONST)) 5120 { 5121 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, 5122 fold_convert_loc (loc, TREE_TYPE (arg00), 5123 arg2)); 5124 return pedantic_non_lvalue_loc (loc, 5125 fold_convert_loc (loc, type, tem)); 5126 } 5127 break; 5128 5129 case LE_EXPR: 5130 /* If C1 is C2 - 1, this is min(A, C2), with the same care 5131 as above. */ 5132 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 5133 OEP_ONLY_CONST) 5134 && operand_equal_p (arg01, 5135 const_binop (MINUS_EXPR, arg2, 5136 build_int_cst (type, 1)), 5137 OEP_ONLY_CONST)) 5138 { 5139 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, 5140 fold_convert_loc (loc, TREE_TYPE (arg00), 5141 arg2)); 5142 return pedantic_non_lvalue_loc (loc, 5143 fold_convert_loc (loc, type, tem)); 5144 } 5145 break; 5146 5147 case GT_EXPR: 5148 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for 5149 MAX_EXPR, to preserve the signedness of the comparison. */ 5150 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 5151 OEP_ONLY_CONST) 5152 && operand_equal_p (arg01, 5153 const_binop (MINUS_EXPR, arg2, 5154 build_int_cst (type, 1)), 5155 OEP_ONLY_CONST)) 5156 { 5157 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, 5158 fold_convert_loc (loc, TREE_TYPE (arg00), 5159 arg2)); 5160 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 5161 } 5162 break; 5163 5164 case GE_EXPR: 5165 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */ 5166 if (! 
operand_equal_p (arg2, TYPE_MAX_VALUE (type), 5167 OEP_ONLY_CONST) 5168 && operand_equal_p (arg01, 5169 const_binop (PLUS_EXPR, arg2, 5170 build_int_cst (type, 1)), 5171 OEP_ONLY_CONST)) 5172 { 5173 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, 5174 fold_convert_loc (loc, TREE_TYPE (arg00), 5175 arg2)); 5176 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 5177 } 5178 break; 5179 case NE_EXPR: 5180 break; 5181 default: 5182 gcc_unreachable (); 5183 } 5184 5185 return NULL_TREE; 5186} 5187 5188 5189 5190#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT 5191#define LOGICAL_OP_NON_SHORT_CIRCUIT \ 5192 (BRANCH_COST (optimize_function_for_speed_p (cfun), \ 5193 false) >= 2) 5194#endif 5195 5196/* EXP is some logical combination of boolean tests. See if we can 5197 merge it into some range test. Return the new tree if so. */ 5198 5199static tree 5200fold_range_test (location_t loc, enum tree_code code, tree type, 5201 tree op0, tree op1) 5202{ 5203 int or_op = (code == TRUTH_ORIF_EXPR 5204 || code == TRUTH_OR_EXPR); 5205 int in0_p, in1_p, in_p; 5206 tree low0, low1, low, high0, high1, high; 5207 bool strict_overflow_p = false; 5208 tree tem, lhs, rhs; 5209 const char * const warnmsg = G_("assuming signed overflow does not occur " 5210 "when simplifying range test"); 5211 5212 if (!INTEGRAL_TYPE_P (type)) 5213 return 0; 5214 5215 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p); 5216 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p); 5217 5218 /* If this is an OR operation, invert both sides; we will invert 5219 again at the end. */ 5220 if (or_op) 5221 in0_p = ! in0_p, in1_p = ! in1_p; 5222 5223 /* If both expressions are the same, if we can merge the ranges, and we 5224 can build the range test, return it or it inverted. If one of the 5225 ranges is always true or always false, consider it to be the same 5226 expression as the other. */ 5227 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0)) 5228 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0, 5229 in1_p, low1, high1) 5230 && 0 != (tem = (build_range_check (loc, type, 5231 lhs != 0 ? lhs 5232 : rhs != 0 ? rhs : integer_zero_node, 5233 in_p, low, high)))) 5234 { 5235 if (strict_overflow_p) 5236 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 5237 return or_op ? invert_truthvalue_loc (loc, tem) : tem; 5238 } 5239 5240 /* On machines where the branch cost is expensive, if this is a 5241 short-circuited branch and the underlying object on both sides 5242 is the same, make a non-short-circuit operation. */ 5243 else if (LOGICAL_OP_NON_SHORT_CIRCUIT 5244 && lhs != 0 && rhs != 0 5245 && (code == TRUTH_ANDIF_EXPR 5246 || code == TRUTH_ORIF_EXPR) 5247 && operand_equal_p (lhs, rhs, 0)) 5248 { 5249 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR 5250 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in 5251 which cases we can't do this. */ 5252 if (simple_operand_p (lhs)) 5253 return build2_loc (loc, code == TRUTH_ANDIF_EXPR 5254 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, 5255 type, op0, op1); 5256 5257 else if (!lang_hooks.decls.global_bindings_p () 5258 && !CONTAINS_PLACEHOLDER_P (lhs)) 5259 { 5260 tree common = save_expr (lhs); 5261 5262 if (0 != (lhs = build_range_check (loc, type, common, 5263 or_op ? ! in0_p : in0_p, 5264 low0, high0)) 5265 && (0 != (rhs = build_range_check (loc, type, common, 5266 or_op ? ! 
in1_p : in1_p, 5267 low1, high1)))) 5268 { 5269 if (strict_overflow_p) 5270 fold_overflow_warning (warnmsg, 5271 WARN_STRICT_OVERFLOW_COMPARISON); 5272 return build2_loc (loc, code == TRUTH_ANDIF_EXPR 5273 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, 5274 type, lhs, rhs); 5275 } 5276 } 5277 } 5278 5279 return 0; 5280} 5281 5282/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P 5283 bit value. Arrange things so the extra bits will be set to zero if and 5284 only if C is signed-extended to its full width. If MASK is nonzero, 5285 it is an INTEGER_CST that should be AND'ed with the extra bits. */ 5286 5287static tree 5288unextend (tree c, int p, int unsignedp, tree mask) 5289{ 5290 tree type = TREE_TYPE (c); 5291 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type)); 5292 tree temp; 5293 5294 if (p == modesize || unsignedp) 5295 return c; 5296 5297 /* We work by getting just the sign bit into the low-order bit, then 5298 into the high-order bit, then sign-extend. We then XOR that value 5299 with C. */ 5300 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1)); 5301 5302 /* We must use a signed type in order to get an arithmetic right shift. 5303 However, we must also avoid introducing accidental overflows, so that 5304 a subsequent call to integer_zerop will work. Hence we must 5305 do the type conversion here. At this point, the constant is either 5306 zero or one, and the conversion to a signed type can never overflow. 5307 We could get an overflow if this conversion is done anywhere else. */ 5308 if (TYPE_UNSIGNED (type)) 5309 temp = fold_convert (signed_type_for (type), temp); 5310 5311 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1)); 5312 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1)); 5313 if (mask != 0) 5314 temp = const_binop (BIT_AND_EXPR, temp, 5315 fold_convert (TREE_TYPE (c), mask)); 5316 /* If necessary, convert the type back to match the type of C. */ 5317 if (TYPE_UNSIGNED (type)) 5318 temp = fold_convert (type, temp); 5319 5320 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp)); 5321} 5322 5323/* For an expression that has the form 5324 (A && B) || ~B 5325 or 5326 (A || B) && ~B, 5327 we can drop one of the inner expressions and simplify to 5328 A || ~B 5329 or 5330 A && ~B 5331 LOC is the location of the resulting expression. OP is the inner 5332 logical operation; the left-hand side in the examples above, while CMPOP 5333 is the right-hand side. RHS_ONLY is used to prevent us from accidentally 5334 removing a condition that guards another, as in 5335 (A != NULL && A->...) || A == NULL 5336 which we must not transform. If RHS_ONLY is true, only eliminate the 5337 right-most operand of the inner logical operation. 
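So with RHS_ONLY set, (A && B) || ~B still simplifies to A || ~B, since B is the right-most operand of the inner operation, but (B && A) || ~B is left alone in that mode because the matching operand is on the left.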
*/ 5338 5339static tree 5340merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop, 5341 bool rhs_only) 5342{ 5343 tree type = TREE_TYPE (cmpop); 5344 enum tree_code code = TREE_CODE (cmpop); 5345 enum tree_code truthop_code = TREE_CODE (op); 5346 tree lhs = TREE_OPERAND (op, 0); 5347 tree rhs = TREE_OPERAND (op, 1); 5348 tree orig_lhs = lhs, orig_rhs = rhs; 5349 enum tree_code rhs_code = TREE_CODE (rhs); 5350 enum tree_code lhs_code = TREE_CODE (lhs); 5351 enum tree_code inv_code; 5352 5353 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop)) 5354 return NULL_TREE; 5355 5356 if (TREE_CODE_CLASS (code) != tcc_comparison) 5357 return NULL_TREE; 5358 5359 if (rhs_code == truthop_code) 5360 { 5361 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only); 5362 if (newrhs != NULL_TREE) 5363 { 5364 rhs = newrhs; 5365 rhs_code = TREE_CODE (rhs); 5366 } 5367 } 5368 if (lhs_code == truthop_code && !rhs_only) 5369 { 5370 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false); 5371 if (newlhs != NULL_TREE) 5372 { 5373 lhs = newlhs; 5374 lhs_code = TREE_CODE (lhs); 5375 } 5376 } 5377 5378 inv_code = invert_tree_comparison (code, HONOR_NANS (type)); 5379 if (inv_code == rhs_code 5380 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0) 5381 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0)) 5382 return lhs; 5383 if (!rhs_only && inv_code == lhs_code 5384 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0) 5385 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0)) 5386 return rhs; 5387 if (rhs != orig_rhs || lhs != orig_lhs) 5388 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop), 5389 lhs, rhs); 5390 return NULL_TREE; 5391} 5392 5393/* Find ways of folding logical expressions of LHS and RHS: 5394 Try to merge two comparisons to the same innermost item. 5395 Look for range tests like "ch >= '0' && ch <= '9'". 5396 Look for combinations of simple terms on machines with expensive branches 5397 and evaluate the RHS unconditionally. 5398 5399 For example, if we have p->a == 2 && p->b == 4 and we can make an 5400 object large enough to span both A and B, we can do this with a comparison 5401 against the object ANDed with the a mask. 5402 5403 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking 5404 operations to do this with one comparison. 5405 5406 We check for both normal comparisons and the BIT_AND_EXPRs made this by 5407 function and the one above. 5408 5409 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR, 5410 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR. 5411 5412 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its 5413 two operands. 5414 5415 We return the simplified tree or 0 if no optimization is possible. */ 5416 5417static tree 5418fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type, 5419 tree lhs, tree rhs) 5420{ 5421 /* If this is the "or" of two comparisons, we can do something if 5422 the comparisons are NE_EXPR. If this is the "and", we can do something 5423 if the comparisons are EQ_EXPR. I.e., 5424 (a->b == 2 && a->c == 4) can become (a->new == NEW). 5425 5426 WANTED_CODE is this operation code. For single bit fields, we can 5427 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong" 5428 comparison for one-bit fields. 
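For example, in a->b == 2 && a->c != 0 where a->c is a one-bit field, the right-hand comparison is in effect rewritten as a test that the bit is set, so both halves end up using the wanted EQ_EXPR.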
*/ 5429 5430 enum tree_code wanted_code; 5431 enum tree_code lcode, rcode; 5432 tree ll_arg, lr_arg, rl_arg, rr_arg; 5433 tree ll_inner, lr_inner, rl_inner, rr_inner; 5434 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos; 5435 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos; 5436 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos; 5437 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos; 5438 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp; 5439 machine_mode ll_mode, lr_mode, rl_mode, rr_mode; 5440 machine_mode lnmode, rnmode; 5441 tree ll_mask, lr_mask, rl_mask, rr_mask; 5442 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask; 5443 tree l_const, r_const; 5444 tree lntype, rntype, result; 5445 HOST_WIDE_INT first_bit, end_bit; 5446 int volatilep; 5447 5448 /* Start by getting the comparison codes. Fail if anything is volatile. 5449 If one operand is a BIT_AND_EXPR with the constant one, treat it as if 5450 it were surrounded with a NE_EXPR. */ 5451 5452 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs)) 5453 return 0; 5454 5455 lcode = TREE_CODE (lhs); 5456 rcode = TREE_CODE (rhs); 5457 5458 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1))) 5459 { 5460 lhs = build2 (NE_EXPR, truth_type, lhs, 5461 build_int_cst (TREE_TYPE (lhs), 0)); 5462 lcode = NE_EXPR; 5463 } 5464 5465 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1))) 5466 { 5467 rhs = build2 (NE_EXPR, truth_type, rhs, 5468 build_int_cst (TREE_TYPE (rhs), 0)); 5469 rcode = NE_EXPR; 5470 } 5471 5472 if (TREE_CODE_CLASS (lcode) != tcc_comparison 5473 || TREE_CODE_CLASS (rcode) != tcc_comparison) 5474 return 0; 5475 5476 ll_arg = TREE_OPERAND (lhs, 0); 5477 lr_arg = TREE_OPERAND (lhs, 1); 5478 rl_arg = TREE_OPERAND (rhs, 0); 5479 rr_arg = TREE_OPERAND (rhs, 1); 5480 5481 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */ 5482 if (simple_operand_p (ll_arg) 5483 && simple_operand_p (lr_arg)) 5484 { 5485 if (operand_equal_p (ll_arg, rl_arg, 0) 5486 && operand_equal_p (lr_arg, rr_arg, 0)) 5487 { 5488 result = combine_comparisons (loc, code, lcode, rcode, 5489 truth_type, ll_arg, lr_arg); 5490 if (result) 5491 return result; 5492 } 5493 else if (operand_equal_p (ll_arg, rr_arg, 0) 5494 && operand_equal_p (lr_arg, rl_arg, 0)) 5495 { 5496 result = combine_comparisons (loc, code, lcode, 5497 swap_tree_comparison (rcode), 5498 truth_type, ll_arg, lr_arg); 5499 if (result) 5500 return result; 5501 } 5502 } 5503 5504 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR) 5505 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR); 5506 5507 /* If the RHS can be evaluated unconditionally and its operands are 5508 simple, it wins to evaluate the RHS unconditionally on machines 5509 with expensive branches. In this case, this isn't a comparison 5510 that can be merged. */ 5511 5512 if (BRANCH_COST (optimize_function_for_speed_p (cfun), 5513 false) >= 2 5514 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg)) 5515 && simple_operand_p (rl_arg) 5516 && simple_operand_p (rr_arg)) 5517 { 5518 /* Convert (a != 0) || (b != 0) into (a | b) != 0. 
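This applies only when A and B have the same integral type; it trades the second test for a cheap bitwise OR, which wins where branches are expensive. The dual case, (a == 0) && (b == 0) into (a | b) == 0, is handled just below.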
*/ 5519 if (code == TRUTH_OR_EXPR 5520 && lcode == NE_EXPR && integer_zerop (lr_arg) 5521 && rcode == NE_EXPR && integer_zerop (rr_arg) 5522 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) 5523 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) 5524 return build2_loc (loc, NE_EXPR, truth_type, 5525 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5526 ll_arg, rl_arg), 5527 build_int_cst (TREE_TYPE (ll_arg), 0)); 5528 5529 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */ 5530 if (code == TRUTH_AND_EXPR 5531 && lcode == EQ_EXPR && integer_zerop (lr_arg) 5532 && rcode == EQ_EXPR && integer_zerop (rr_arg) 5533 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) 5534 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) 5535 return build2_loc (loc, EQ_EXPR, truth_type, 5536 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5537 ll_arg, rl_arg), 5538 build_int_cst (TREE_TYPE (ll_arg), 0)); 5539 } 5540 5541 /* See if the comparisons can be merged. Then get all the parameters for 5542 each side. */ 5543 5544 if ((lcode != EQ_EXPR && lcode != NE_EXPR) 5545 || (rcode != EQ_EXPR && rcode != NE_EXPR)) 5546 return 0; 5547 5548 volatilep = 0; 5549 ll_inner = decode_field_reference (loc, ll_arg, 5550 &ll_bitsize, &ll_bitpos, &ll_mode, 5551 &ll_unsignedp, &volatilep, &ll_mask, 5552 &ll_and_mask); 5553 lr_inner = decode_field_reference (loc, lr_arg, 5554 &lr_bitsize, &lr_bitpos, &lr_mode, 5555 &lr_unsignedp, &volatilep, &lr_mask, 5556 &lr_and_mask); 5557 rl_inner = decode_field_reference (loc, rl_arg, 5558 &rl_bitsize, &rl_bitpos, &rl_mode, 5559 &rl_unsignedp, &volatilep, &rl_mask, 5560 &rl_and_mask); 5561 rr_inner = decode_field_reference (loc, rr_arg, 5562 &rr_bitsize, &rr_bitpos, &rr_mode, 5563 &rr_unsignedp, &volatilep, &rr_mask, 5564 &rr_and_mask); 5565 5566 /* It must be true that the inner operation on the lhs of each 5567 comparison must be the same if we are to be able to do anything. 5568 Then see if we have constants. If not, the same must be true for 5569 the rhs's. */ 5570 if (volatilep || ll_inner == 0 || rl_inner == 0 5571 || ! operand_equal_p (ll_inner, rl_inner, 0)) 5572 return 0; 5573 5574 if (TREE_CODE (lr_arg) == INTEGER_CST 5575 && TREE_CODE (rr_arg) == INTEGER_CST) 5576 l_const = lr_arg, r_const = rr_arg; 5577 else if (lr_inner == 0 || rr_inner == 0 5578 || ! operand_equal_p (lr_inner, rr_inner, 0)) 5579 return 0; 5580 else 5581 l_const = r_const = 0; 5582 5583 /* If either comparison code is not correct for our logical operation, 5584 fail. However, we can convert a one-bit comparison against zero into 5585 the opposite comparison against that bit being set in the field. */ 5586 5587 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR); 5588 if (lcode != wanted_code) 5589 { 5590 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask)) 5591 { 5592 /* Make the left operand unsigned, since we are only interested 5593 in the value of one bit. Otherwise we are doing the wrong 5594 thing below. */ 5595 ll_unsignedp = 1; 5596 l_const = ll_mask; 5597 } 5598 else 5599 return 0; 5600 } 5601 5602 /* This is analogous to the code for l_const above. */ 5603 if (rcode != wanted_code) 5604 { 5605 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask)) 5606 { 5607 rl_unsignedp = 1; 5608 r_const = rl_mask; 5609 } 5610 else 5611 return 0; 5612 } 5613 5614 /* See if we can find a mode that contains both fields being compared on 5615 the left. If we can't, fail. Otherwise, update all constants and masks 5616 to be relative to a field of that size. 
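
     For instance (alignment permitting), fields at bit positions 3 and 9
     with sizes 4 and 2 span bits 3 through 10, so no 8-bit access covers
     both and get_best_mode settles on a 16-bit mode starting at bit 0;
     ll_mask and rl_mask are then shifted left by 3 and 9 respectively,
     after mirroring the positions on BYTES_BIG_ENDIAN targets.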
*/ 5617 first_bit = MIN (ll_bitpos, rl_bitpos); 5618 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize); 5619 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0, 5620 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode, 5621 volatilep); 5622 if (lnmode == VOIDmode) 5623 return 0; 5624 5625 lnbitsize = GET_MODE_BITSIZE (lnmode); 5626 lnbitpos = first_bit & ~ (lnbitsize - 1); 5627 lntype = lang_hooks.types.type_for_size (lnbitsize, 1); 5628 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos; 5629 5630 if (BYTES_BIG_ENDIAN) 5631 { 5632 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize; 5633 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize; 5634 } 5635 5636 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask), 5637 size_int (xll_bitpos)); 5638 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask), 5639 size_int (xrl_bitpos)); 5640 5641 if (l_const) 5642 { 5643 l_const = fold_convert_loc (loc, lntype, l_const); 5644 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask); 5645 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos)); 5646 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const, 5647 fold_build1_loc (loc, BIT_NOT_EXPR, 5648 lntype, ll_mask)))) 5649 { 5650 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 5651 5652 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 5653 } 5654 } 5655 if (r_const) 5656 { 5657 r_const = fold_convert_loc (loc, lntype, r_const); 5658 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask); 5659 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos)); 5660 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const, 5661 fold_build1_loc (loc, BIT_NOT_EXPR, 5662 lntype, rl_mask)))) 5663 { 5664 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 5665 5666 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 5667 } 5668 } 5669 5670 /* If the right sides are not constant, do the same for it. Also, 5671 disallow this optimization if a size or signedness mismatch occurs 5672 between the left and right sides. */ 5673 if (l_const == 0) 5674 { 5675 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize 5676 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp 5677 /* Make sure the two fields on the right 5678 correspond to the left without being swapped. */ 5679 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos) 5680 return 0; 5681 5682 first_bit = MIN (lr_bitpos, rr_bitpos); 5683 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize); 5684 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0, 5685 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode, 5686 volatilep); 5687 if (rnmode == VOIDmode) 5688 return 0; 5689 5690 rnbitsize = GET_MODE_BITSIZE (rnmode); 5691 rnbitpos = first_bit & ~ (rnbitsize - 1); 5692 rntype = lang_hooks.types.type_for_size (rnbitsize, 1); 5693 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos; 5694 5695 if (BYTES_BIG_ENDIAN) 5696 { 5697 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize; 5698 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize; 5699 } 5700 5701 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, 5702 rntype, lr_mask), 5703 size_int (xlr_bitpos)); 5704 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, 5705 rntype, rr_mask), 5706 size_int (xrr_bitpos)); 5707 5708 /* Make a mask that corresponds to both fields being compared. 5709 Do this for both items being compared. 
If the operands are the 5710 same size and the bits being compared are in the same position 5711 then we can do this by masking both and comparing the masked 5712 results. */ 5713 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); 5714 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask); 5715 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) 5716 { 5717 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, 5718 ll_unsignedp || rl_unsignedp); 5719 if (! all_ones_mask_p (ll_mask, lnbitsize)) 5720 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); 5721 5722 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos, 5723 lr_unsignedp || rr_unsignedp); 5724 if (! all_ones_mask_p (lr_mask, rnbitsize)) 5725 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); 5726 5727 return build2_loc (loc, wanted_code, truth_type, lhs, rhs); 5728 } 5729 5730 /* There is still another way we can do something: If both pairs of 5731 fields being compared are adjacent, we may be able to make a wider 5732 field containing them both. 5733 5734 Note that we still must mask the lhs/rhs expressions. Furthermore, 5735 the mask must be shifted to account for the shift done by 5736 make_bit_field_ref. */ 5737 if ((ll_bitsize + ll_bitpos == rl_bitpos 5738 && lr_bitsize + lr_bitpos == rr_bitpos) 5739 || (ll_bitpos == rl_bitpos + rl_bitsize 5740 && lr_bitpos == rr_bitpos + rr_bitsize)) 5741 { 5742 tree type; 5743 5744 lhs = make_bit_field_ref (loc, ll_inner, lntype, 5745 ll_bitsize + rl_bitsize, 5746 MIN (ll_bitpos, rl_bitpos), ll_unsignedp); 5747 rhs = make_bit_field_ref (loc, lr_inner, rntype, 5748 lr_bitsize + rr_bitsize, 5749 MIN (lr_bitpos, rr_bitpos), lr_unsignedp); 5750 5751 ll_mask = const_binop (RSHIFT_EXPR, ll_mask, 5752 size_int (MIN (xll_bitpos, xrl_bitpos))); 5753 lr_mask = const_binop (RSHIFT_EXPR, lr_mask, 5754 size_int (MIN (xlr_bitpos, xrr_bitpos))); 5755 5756 /* Convert to the smaller type before masking out unwanted bits. */ 5757 type = lntype; 5758 if (lntype != rntype) 5759 { 5760 if (lnbitsize > rnbitsize) 5761 { 5762 lhs = fold_convert_loc (loc, rntype, lhs); 5763 ll_mask = fold_convert_loc (loc, rntype, ll_mask); 5764 type = rntype; 5765 } 5766 else if (lnbitsize < rnbitsize) 5767 { 5768 rhs = fold_convert_loc (loc, lntype, rhs); 5769 lr_mask = fold_convert_loc (loc, lntype, lr_mask); 5770 type = lntype; 5771 } 5772 } 5773 5774 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize)) 5775 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask); 5776 5777 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize)) 5778 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask); 5779 5780 return build2_loc (loc, wanted_code, truth_type, lhs, rhs); 5781 } 5782 5783 return 0; 5784 } 5785 5786 /* Handle the case of comparisons with constants. If there is something in 5787 common between the masks, those bits of the constants must be the same. 5788 If not, the condition is always false. Test for this to avoid generating 5789 incorrect code below. */ 5790 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask); 5791 if (! 
integer_zerop (result) 5792 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const), 5793 const_binop (BIT_AND_EXPR, result, r_const)) != 1) 5794 { 5795 if (wanted_code == NE_EXPR) 5796 { 5797 warning (0, "%<or%> of unmatched not-equal tests is always 1"); 5798 return constant_boolean_node (true, truth_type); 5799 } 5800 else 5801 { 5802 warning (0, "%<and%> of mutually exclusive equal-tests is always 0"); 5803 return constant_boolean_node (false, truth_type); 5804 } 5805 } 5806 5807 /* Construct the expression we will return. First get the component 5808 reference we will make. Unless the mask is all ones the width of 5809 that field, perform the mask operation. Then compare with the 5810 merged constant. */ 5811 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, 5812 ll_unsignedp || rl_unsignedp); 5813 5814 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); 5815 if (! all_ones_mask_p (ll_mask, lnbitsize)) 5816 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask); 5817 5818 return build2_loc (loc, wanted_code, truth_type, result, 5819 const_binop (BIT_IOR_EXPR, l_const, r_const)); 5820} 5821 5822/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a 5823 constant. */ 5824 5825static tree 5826optimize_minmax_comparison (location_t loc, enum tree_code code, tree type, 5827 tree op0, tree op1) 5828{ 5829 tree arg0 = op0; 5830 enum tree_code op_code; 5831 tree comp_const; 5832 tree minmax_const; 5833 int consts_equal, consts_lt; 5834 tree inner; 5835 5836 STRIP_SIGN_NOPS (arg0); 5837 5838 op_code = TREE_CODE (arg0); 5839 minmax_const = TREE_OPERAND (arg0, 1); 5840 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1); 5841 consts_equal = tree_int_cst_equal (minmax_const, comp_const); 5842 consts_lt = tree_int_cst_lt (minmax_const, comp_const); 5843 inner = TREE_OPERAND (arg0, 0); 5844 5845 /* If something does not permit us to optimize, return the original tree. */ 5846 if ((op_code != MIN_EXPR && op_code != MAX_EXPR) 5847 || TREE_CODE (comp_const) != INTEGER_CST 5848 || TREE_OVERFLOW (comp_const) 5849 || TREE_CODE (minmax_const) != INTEGER_CST 5850 || TREE_OVERFLOW (minmax_const)) 5851 return NULL_TREE; 5852 5853 /* Now handle all the various comparison codes. We only handle EQ_EXPR 5854 and GT_EXPR, doing the rest with recursive calls using logical 5855 simplifications. 
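
     For instance, MIN (X, 10) < 5 is handled by inverting it to
     MIN (X, 10) >= 5 and negating the folded result, and GE_EXPR is in
     turn expanded as an EQ_EXPR test ORed with a GT_EXPR test, so only
     those two arms below reason about MIN and MAX directly.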
*/ 5856 switch (code) 5857 { 5858 case NE_EXPR: case LT_EXPR: case LE_EXPR: 5859 { 5860 tree tem 5861 = optimize_minmax_comparison (loc, 5862 invert_tree_comparison (code, false), 5863 type, op0, op1); 5864 if (tem) 5865 return invert_truthvalue_loc (loc, tem); 5866 return NULL_TREE; 5867 } 5868 5869 case GE_EXPR: 5870 return 5871 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, 5872 optimize_minmax_comparison 5873 (loc, EQ_EXPR, type, arg0, comp_const), 5874 optimize_minmax_comparison 5875 (loc, GT_EXPR, type, arg0, comp_const)); 5876 5877 case EQ_EXPR: 5878 if (op_code == MAX_EXPR && consts_equal) 5879 /* MAX (X, 0) == 0 -> X <= 0 */ 5880 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const); 5881 5882 else if (op_code == MAX_EXPR && consts_lt) 5883 /* MAX (X, 0) == 5 -> X == 5 */ 5884 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); 5885 5886 else if (op_code == MAX_EXPR) 5887 /* MAX (X, 0) == -1 -> false */ 5888 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 5889 5890 else if (consts_equal) 5891 /* MIN (X, 0) == 0 -> X >= 0 */ 5892 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const); 5893 5894 else if (consts_lt) 5895 /* MIN (X, 0) == 5 -> false */ 5896 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 5897 5898 else 5899 /* MIN (X, 0) == -1 -> X == -1 */ 5900 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); 5901 5902 case GT_EXPR: 5903 if (op_code == MAX_EXPR && (consts_equal || consts_lt)) 5904 /* MAX (X, 0) > 0 -> X > 0 5905 MAX (X, 0) > 5 -> X > 5 */ 5906 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); 5907 5908 else if (op_code == MAX_EXPR) 5909 /* MAX (X, 0) > -1 -> true */ 5910 return omit_one_operand_loc (loc, type, integer_one_node, inner); 5911 5912 else if (op_code == MIN_EXPR && (consts_equal || consts_lt)) 5913 /* MIN (X, 0) > 0 -> false 5914 MIN (X, 0) > 5 -> false */ 5915 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 5916 5917 else 5918 /* MIN (X, 0) > -1 -> X > -1 */ 5919 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); 5920 5921 default: 5922 return NULL_TREE; 5923 } 5924} 5925 5926/* T is an integer expression that is being multiplied, divided, or taken a 5927 modulus (CODE says which and what kind of divide or modulus) by a 5928 constant C. See if we can eliminate that operation by folding it with 5929 other operations already in T. WIDE_TYPE, if non-null, is a type that 5930 should be used for the computation if wider than our type. 5931 5932 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return 5933 (X * 2) + (Y * 4). We must, however, be assured that either the original 5934 expression would not overflow or that overflow is undefined for the type 5935 in the language in question. 5936 5937 If we return a non-null expression, it is an equivalent form of the 5938 original computation, but need not be in the original type. 5939 5940 We set *STRICT_OVERFLOW_P to true if the return values depends on 5941 signed overflow being undefined. Otherwise we do not change 5942 *STRICT_OVERFLOW_P. */ 5943 5944static tree 5945extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type, 5946 bool *strict_overflow_p) 5947{ 5948 /* To avoid exponential search depth, refuse to allow recursion past 5949 three levels. Beyond that (1) it's highly unlikely that we'll find 5950 something interesting and (2) we've probably processed it before 5951 when we built the inner expression. 
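
     The depth counter below is a static variable that is bumped around the
     call to extract_muldiv_1 and restored on return, so it measures the
     dynamic nesting of these calls without threading another parameter
     through every recursive site.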
*/ 5952 5953 static int depth; 5954 tree ret; 5955 5956 if (depth > 3) 5957 return NULL; 5958 5959 depth++; 5960 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p); 5961 depth--; 5962 5963 return ret; 5964} 5965 5966static tree 5967extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, 5968 bool *strict_overflow_p) 5969{ 5970 tree type = TREE_TYPE (t); 5971 enum tree_code tcode = TREE_CODE (t); 5972 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type)) 5973 > GET_MODE_SIZE (TYPE_MODE (type))) 5974 ? wide_type : type); 5975 tree t1, t2; 5976 int same_p = tcode == code; 5977 tree op0 = NULL_TREE, op1 = NULL_TREE; 5978 bool sub_strict_overflow_p; 5979 5980 /* Don't deal with constants of zero here; they confuse the code below. */ 5981 if (integer_zerop (c)) 5982 return NULL_TREE; 5983 5984 if (TREE_CODE_CLASS (tcode) == tcc_unary) 5985 op0 = TREE_OPERAND (t, 0); 5986 5987 if (TREE_CODE_CLASS (tcode) == tcc_binary) 5988 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1); 5989 5990 /* Note that we need not handle conditional operations here since fold 5991 already handles those cases. So just do arithmetic here. */ 5992 switch (tcode) 5993 { 5994 case INTEGER_CST: 5995 /* For a constant, we can always simplify if we are a multiply 5996 or (for divide and modulus) if it is a multiple of our constant. */ 5997 if (code == MULT_EXPR 5998 || wi::multiple_of_p (t, c, TYPE_SIGN (type))) 5999 { 6000 tree tem = const_binop (code, fold_convert (ctype, t), 6001 fold_convert (ctype, c)); 6002 /* If the multiplication overflowed, we lost information on it. 6003 See PR68142 and PR69845. */ 6004 if (TREE_OVERFLOW (tem)) 6005 return NULL_TREE; 6006 return tem; 6007 } 6008 break; 6009 6010 CASE_CONVERT: case NON_LVALUE_EXPR: 6011 /* If op0 is an expression ... */ 6012 if ((COMPARISON_CLASS_P (op0) 6013 || UNARY_CLASS_P (op0) 6014 || BINARY_CLASS_P (op0) 6015 || VL_EXP_CLASS_P (op0) 6016 || EXPRESSION_CLASS_P (op0)) 6017 /* ... and has wrapping overflow, and its type is smaller 6018 than ctype, then we cannot pass through as widening. */ 6019 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0)) 6020 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))) 6021 && (TYPE_PRECISION (ctype) 6022 > TYPE_PRECISION (TREE_TYPE (op0)))) 6023 /* ... or this is a truncation (t is narrower than op0), 6024 then we cannot pass through this narrowing. */ 6025 || (TYPE_PRECISION (type) 6026 < TYPE_PRECISION (TREE_TYPE (op0))) 6027 /* ... or signedness changes for division or modulus, 6028 then we cannot pass through this conversion. */ 6029 || (code != MULT_EXPR 6030 && (TYPE_UNSIGNED (ctype) 6031 != TYPE_UNSIGNED (TREE_TYPE (op0)))) 6032 /* ... or has undefined overflow while the converted to 6033 type has not, we cannot do the operation in the inner type 6034 as that would introduce undefined overflow. */ 6035 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0)) 6036 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))) 6037 && !TYPE_OVERFLOW_UNDEFINED (type)))) 6038 break; 6039 6040 /* Pass the constant down and see if we can make a simplification. If 6041 we can, replace this expression with the inner simplification for 6042 possible later conversion to our or some other type. */ 6043 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0 6044 && TREE_CODE (t2) == INTEGER_CST 6045 && !TREE_OVERFLOW (t2) 6046 && (0 != (t1 = extract_muldiv (op0, t2, code, 6047 code == MULT_EXPR 6048 ? 
ctype : NULL_TREE, 6049 strict_overflow_p)))) 6050 return t1; 6051 break; 6052 6053 case ABS_EXPR: 6054 /* If widening the type changes it from signed to unsigned, then we 6055 must avoid building ABS_EXPR itself as unsigned. */ 6056 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type)) 6057 { 6058 tree cstype = (*signed_type_for) (ctype); 6059 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p)) 6060 != 0) 6061 { 6062 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1)); 6063 return fold_convert (ctype, t1); 6064 } 6065 break; 6066 } 6067 /* If the constant is negative, we cannot simplify this. */ 6068 if (tree_int_cst_sgn (c) == -1) 6069 break; 6070 /* FALLTHROUGH */ 6071 case NEGATE_EXPR: 6072 /* For division and modulus, type can't be unsigned, as e.g. 6073 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2. 6074 For signed types, even with wrapping overflow, this is fine. */ 6075 if (code != MULT_EXPR && TYPE_UNSIGNED (type)) 6076 break; 6077 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p)) 6078 != 0) 6079 return fold_build1 (tcode, ctype, fold_convert (ctype, t1)); 6080 break; 6081 6082 case MIN_EXPR: case MAX_EXPR: 6083 /* If widening the type changes the signedness, then we can't perform 6084 this optimization as that changes the result. */ 6085 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type)) 6086 break; 6087 6088 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */ 6089 sub_strict_overflow_p = false; 6090 if ((t1 = extract_muldiv (op0, c, code, wide_type, 6091 &sub_strict_overflow_p)) != 0 6092 && (t2 = extract_muldiv (op1, c, code, wide_type, 6093 &sub_strict_overflow_p)) != 0) 6094 { 6095 if (tree_int_cst_sgn (c) < 0) 6096 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR); 6097 if (sub_strict_overflow_p) 6098 *strict_overflow_p = true; 6099 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 6100 fold_convert (ctype, t2)); 6101 } 6102 break; 6103 6104 case LSHIFT_EXPR: case RSHIFT_EXPR: 6105 /* If the second operand is constant, this is a multiplication 6106 or floor division, by a power of two, so we can treat it that 6107 way unless the multiplier or divisor overflows. Signed 6108 left-shift overflow is implementation-defined rather than 6109 undefined in C90, so do not convert signed left shift into 6110 multiplication. */ 6111 if (TREE_CODE (op1) == INTEGER_CST 6112 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0))) 6113 /* const_binop may not detect overflow correctly, 6114 so check for it explicitly here. */ 6115 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1) 6116 && 0 != (t1 = fold_convert (ctype, 6117 const_binop (LSHIFT_EXPR, 6118 size_one_node, 6119 op1))) 6120 && !TREE_OVERFLOW (t1)) 6121 return extract_muldiv (build2 (tcode == LSHIFT_EXPR 6122 ? MULT_EXPR : FLOOR_DIV_EXPR, 6123 ctype, 6124 fold_convert (ctype, op0), 6125 t1), 6126 c, code, wide_type, strict_overflow_p); 6127 break; 6128 6129 case PLUS_EXPR: case MINUS_EXPR: 6130 /* See if we can eliminate the operation on both sides. If we can, we 6131 can return a new PLUS or MINUS. If we can't, the only remaining 6132 cases where we can do anything are if the second operand is a 6133 constant. */ 6134 sub_strict_overflow_p = false; 6135 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p); 6136 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p); 6137 if (t1 != 0 && t2 != 0 6138 && (code == MULT_EXPR 6139 /* If not multiplication, we can only do this if both operands 6140 are divisible by c. 
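                 For instance, (A * 4 + B * 8) / 4 becomes A + B * 2, while
                 (A * 4 + B * 6) / 4 is left alone because B * 6 is not a
                 multiple of 4; as noted above, this also requires that
                 overflow be undefined or known not to happen.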
*/ 6141 || (multiple_of_p (ctype, op0, c) 6142 && multiple_of_p (ctype, op1, c)))) 6143 { 6144 if (sub_strict_overflow_p) 6145 *strict_overflow_p = true; 6146 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 6147 fold_convert (ctype, t2)); 6148 } 6149 6150 /* If this was a subtraction, negate OP1 and set it to be an addition. 6151 This simplifies the logic below. */ 6152 if (tcode == MINUS_EXPR) 6153 { 6154 tcode = PLUS_EXPR, op1 = negate_expr (op1); 6155 /* If OP1 was not easily negatable, the constant may be OP0. */ 6156 if (TREE_CODE (op0) == INTEGER_CST) 6157 { 6158 tree tem = op0; 6159 op0 = op1; 6160 op1 = tem; 6161 tem = t1; 6162 t1 = t2; 6163 t2 = tem; 6164 } 6165 } 6166 6167 if (TREE_CODE (op1) != INTEGER_CST) 6168 break; 6169 6170 /* If either OP1 or C are negative, this optimization is not safe for 6171 some of the division and remainder types while for others we need 6172 to change the code. */ 6173 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0) 6174 { 6175 if (code == CEIL_DIV_EXPR) 6176 code = FLOOR_DIV_EXPR; 6177 else if (code == FLOOR_DIV_EXPR) 6178 code = CEIL_DIV_EXPR; 6179 else if (code != MULT_EXPR 6180 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR) 6181 break; 6182 } 6183 6184 /* If it's a multiply or a division/modulus operation of a multiple 6185 of our constant, do the operation and verify it doesn't overflow. */ 6186 if (code == MULT_EXPR 6187 || wi::multiple_of_p (op1, c, TYPE_SIGN (type))) 6188 { 6189 op1 = const_binop (code, fold_convert (ctype, op1), 6190 fold_convert (ctype, c)); 6191 /* We allow the constant to overflow with wrapping semantics. */ 6192 if (op1 == 0 6193 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype))) 6194 break; 6195 } 6196 else 6197 break; 6198 6199 /* If we have an unsigned type, we cannot widen the operation since it 6200 will change the result if the original computation overflowed. */ 6201 if (TYPE_UNSIGNED (ctype) && ctype != type) 6202 break; 6203 6204 /* If we were able to eliminate our operation from the first side, 6205 apply our operation to the second side and reform the PLUS. */ 6206 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR)) 6207 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1); 6208 6209 /* The last case is if we are a multiply. In that case, we can 6210 apply the distributive law to commute the multiply and addition 6211 if the multiplication of the constants doesn't overflow 6212 and overflow is defined. With undefined overflow 6213 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */ 6214 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype)) 6215 return fold_build2 (tcode, ctype, 6216 fold_build2 (code, ctype, 6217 fold_convert (ctype, op0), 6218 fold_convert (ctype, c)), 6219 op1); 6220 6221 break; 6222 6223 case MULT_EXPR: 6224 /* We have a special case here if we are doing something like 6225 (C * 8) % 4 since we know that's zero. */ 6226 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR 6227 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR) 6228 /* If the multiplication can overflow we cannot optimize this. */ 6229 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)) 6230 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST 6231 && wi::multiple_of_p (op1, c, TYPE_SIGN (type))) 6232 { 6233 *strict_overflow_p = true; 6234 return omit_one_operand (type, integer_zero_node, op0); 6235 } 6236 6237 /* ... fall through ... 
*/ 6238 6239 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR: 6240 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR: 6241 /* If we can extract our operation from the LHS, do so and return a 6242 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise, 6243 do something only if the second operand is a constant. */ 6244 if (same_p 6245 && (t1 = extract_muldiv (op0, c, code, wide_type, 6246 strict_overflow_p)) != 0) 6247 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 6248 fold_convert (ctype, op1)); 6249 else if (tcode == MULT_EXPR && code == MULT_EXPR 6250 && (t1 = extract_muldiv (op1, c, code, wide_type, 6251 strict_overflow_p)) != 0) 6252 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 6253 fold_convert (ctype, t1)); 6254 else if (TREE_CODE (op1) != INTEGER_CST) 6255 return 0; 6256 6257 /* If these are the same operation types, we can associate them 6258 assuming no overflow. */ 6259 if (tcode == code) 6260 { 6261 bool overflow_p = false; 6262 bool overflow_mul_p; 6263 signop sign = TYPE_SIGN (ctype); 6264 unsigned prec = TYPE_PRECISION (ctype); 6265 wide_int mul = wi::mul (wide_int::from (op1, prec, 6266 TYPE_SIGN (TREE_TYPE (op1))), 6267 wide_int::from (c, prec, 6268 TYPE_SIGN (TREE_TYPE (c))), 6269 sign, &overflow_mul_p); 6270 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1); 6271 if (overflow_mul_p 6272 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED)) 6273 overflow_p = true; 6274 if (!overflow_p) 6275 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 6276 wide_int_to_tree (ctype, mul)); 6277 } 6278 6279 /* If these operations "cancel" each other, we have the main 6280 optimizations of this pass, which occur when either constant is a 6281 multiple of the other, in which case we replace this with either an 6282 operation or CODE or TCODE. 6283 6284 If we have an unsigned type, we cannot do this since it will change 6285 the result if the original computation overflowed. */ 6286 if (TYPE_OVERFLOW_UNDEFINED (ctype) 6287 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR) 6288 || (tcode == MULT_EXPR 6289 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR 6290 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR 6291 && code != MULT_EXPR))) 6292 { 6293 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type))) 6294 { 6295 if (TYPE_OVERFLOW_UNDEFINED (ctype)) 6296 *strict_overflow_p = true; 6297 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 6298 fold_convert (ctype, 6299 const_binop (TRUNC_DIV_EXPR, 6300 op1, c))); 6301 } 6302 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type))) 6303 { 6304 if (TYPE_OVERFLOW_UNDEFINED (ctype)) 6305 *strict_overflow_p = true; 6306 return fold_build2 (code, ctype, fold_convert (ctype, op0), 6307 fold_convert (ctype, 6308 const_binop (TRUNC_DIV_EXPR, 6309 c, op1))); 6310 } 6311 } 6312 break; 6313 6314 default: 6315 break; 6316 } 6317 6318 return 0; 6319} 6320 6321/* Return a node which has the indicated constant VALUE (either 0 or 6322 1 for scalars or {-1,-1,..} or {0,0,...} for vectors), 6323 and is of the indicated TYPE. */ 6324 6325tree 6326constant_boolean_node (bool value, tree type) 6327{ 6328 if (type == integer_type_node) 6329 return value ? integer_one_node : integer_zero_node; 6330 else if (type == boolean_type_node) 6331 return value ? boolean_true_node : boolean_false_node; 6332 else if (TREE_CODE (type) == VECTOR_TYPE) 6333 return build_vector_from_val (type, 6334 build_int_cst (TREE_TYPE (type), 6335 value ? -1 : 0)); 6336 else 6337 return fold_convert (type, value ? 
integer_one_node : integer_zero_node); 6338} 6339 6340 6341/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'. 6342 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here 6343 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)' 6344 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the 6345 COND is the first argument to CODE; otherwise (as in the example 6346 given here), it is the second argument. TYPE is the type of the 6347 original expression. Return NULL_TREE if no simplification is 6348 possible. */ 6349 6350static tree 6351fold_binary_op_with_conditional_arg (location_t loc, 6352 enum tree_code code, 6353 tree type, tree op0, tree op1, 6354 tree cond, tree arg, int cond_first_p) 6355{ 6356 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1); 6357 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0); 6358 tree test, true_value, false_value; 6359 tree lhs = NULL_TREE; 6360 tree rhs = NULL_TREE; 6361 enum tree_code cond_code = COND_EXPR; 6362 6363 if (TREE_CODE (cond) == COND_EXPR 6364 || TREE_CODE (cond) == VEC_COND_EXPR) 6365 { 6366 test = TREE_OPERAND (cond, 0); 6367 true_value = TREE_OPERAND (cond, 1); 6368 false_value = TREE_OPERAND (cond, 2); 6369 /* If this operand throws an expression, then it does not make 6370 sense to try to perform a logical or arithmetic operation 6371 involving it. */ 6372 if (VOID_TYPE_P (TREE_TYPE (true_value))) 6373 lhs = true_value; 6374 if (VOID_TYPE_P (TREE_TYPE (false_value))) 6375 rhs = false_value; 6376 } 6377 else 6378 { 6379 tree testtype = TREE_TYPE (cond); 6380 test = cond; 6381 true_value = constant_boolean_node (true, testtype); 6382 false_value = constant_boolean_node (false, testtype); 6383 } 6384 6385 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE) 6386 cond_code = VEC_COND_EXPR; 6387 6388 /* This transformation is only worthwhile if we don't have to wrap ARG 6389 in a SAVE_EXPR and the operation can be simplified without recursing 6390 on at least one of the branches once its pushed inside the COND_EXPR. */ 6391 if (!TREE_CONSTANT (arg) 6392 && (TREE_SIDE_EFFECTS (arg) 6393 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR 6394 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value))) 6395 return NULL_TREE; 6396 6397 arg = fold_convert_loc (loc, arg_type, arg); 6398 if (lhs == 0) 6399 { 6400 true_value = fold_convert_loc (loc, cond_type, true_value); 6401 if (cond_first_p) 6402 lhs = fold_build2_loc (loc, code, type, true_value, arg); 6403 else 6404 lhs = fold_build2_loc (loc, code, type, arg, true_value); 6405 } 6406 if (rhs == 0) 6407 { 6408 false_value = fold_convert_loc (loc, cond_type, false_value); 6409 if (cond_first_p) 6410 rhs = fold_build2_loc (loc, code, type, false_value, arg); 6411 else 6412 rhs = fold_build2_loc (loc, code, type, arg, false_value); 6413 } 6414 6415 /* Check that we have simplified at least one of the branches. */ 6416 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs)) 6417 return NULL_TREE; 6418 6419 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs); 6420} 6421 6422 6423/* Subroutine of fold() that checks for the addition of +/- 0.0. 6424 6425 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type 6426 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X - 6427 ADDEND is the same as X. 6428 6429 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero 6430 and finite. The problematic cases are when X is zero, and its mode 6431 has signed zeros. 
In the case of rounding towards -infinity, 6432 X - 0 is not the same as X because 0 - 0 is -0. In other rounding 6433 modes, X + 0 is not the same as X because -0 + 0 is 0. */ 6434 6435bool 6436fold_real_zero_addition_p (const_tree type, const_tree addend, int negate) 6437{ 6438 if (!real_zerop (addend)) 6439 return false; 6440 6441 /* Don't allow the fold with -fsignaling-nans. */ 6442 if (HONOR_SNANS (element_mode (type))) 6443 return false; 6444 6445 /* Allow the fold if zeros aren't signed, or their sign isn't important. */ 6446 if (!HONOR_SIGNED_ZEROS (element_mode (type))) 6447 return true; 6448 6449 /* In a vector or complex, we would need to check the sign of all zeros. */ 6450 if (TREE_CODE (addend) != REAL_CST) 6451 return false; 6452 6453 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */ 6454 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend))) 6455 negate = !negate; 6456 6457 /* The mode has signed zeros, and we have to honor their sign. 6458 In this situation, there is only one case we can return true for. 6459 X - 0 is the same as X unless rounding towards -infinity is 6460 supported. */ 6461 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)); 6462} 6463 6464/* Subroutine of fold() that checks comparisons of built-in math 6465 functions against real constants. 6466 6467 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison 6468 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE 6469 is the type of the result and ARG0 and ARG1 are the operands of the 6470 comparison. ARG1 must be a TREE_REAL_CST. 6471 6472 The function returns the constant folded tree if a simplification 6473 can be made, and NULL_TREE otherwise. */ 6474 6475static tree 6476fold_mathfn_compare (location_t loc, 6477 enum built_in_function fcode, enum tree_code code, 6478 tree type, tree arg0, tree arg1) 6479{ 6480 REAL_VALUE_TYPE c; 6481 6482 if (BUILTIN_SQRT_P (fcode)) 6483 { 6484 tree arg = CALL_EXPR_ARG (arg0, 0); 6485 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0)); 6486 6487 c = TREE_REAL_CST (arg1); 6488 if (REAL_VALUE_NEGATIVE (c)) 6489 { 6490 /* sqrt(x) < y is always false, if y is negative. */ 6491 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR) 6492 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 6493 6494 /* sqrt(x) > y is always true, if y is negative and we 6495 don't care about NaNs, i.e. negative values of x. */ 6496 if (code == NE_EXPR || !HONOR_NANS (mode)) 6497 return omit_one_operand_loc (loc, type, integer_one_node, arg); 6498 6499 /* sqrt(x) > y is the same as x >= 0, if y is negative. */ 6500 return fold_build2_loc (loc, GE_EXPR, type, arg, 6501 build_real (TREE_TYPE (arg), dconst0)); 6502 } 6503 else if (code == GT_EXPR || code == GE_EXPR) 6504 { 6505 REAL_VALUE_TYPE c2; 6506 6507 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); 6508 real_convert (&c2, mode, &c2); 6509 6510 if (REAL_VALUE_ISINF (c2)) 6511 { 6512 /* sqrt(x) > y is x == +Inf, when y is very large. */ 6513 if (HONOR_INFINITIES (mode)) 6514 return fold_build2_loc (loc, EQ_EXPR, type, arg, 6515 build_real (TREE_TYPE (arg), c2)); 6516 6517 /* sqrt(x) > y is always false, when y is very large 6518 and we don't care about infinities. */ 6519 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 6520 } 6521 6522 /* sqrt(x) > c is the same as x > c*c. 
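             For example, sqrt(x) > 2.0 becomes x > 4.0.  Both forms are
             false when x is a NaN (including sqrt of a negative argument),
             so unlike the < and <= cases below no extra NaN guard is
             needed here.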
*/
          return fold_build2_loc (loc, code, type, arg,
                                  build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand_loc (loc, type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2_loc (loc, NE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, GE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                      fold_build2_loc (loc, GE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   dconst0)),
                                      fold_build2_loc (loc, NE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2_loc (loc, code, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          arg = save_expr (arg);
          return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               dconst0)),
                                  fold_build2_loc (loc, code, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               c2)));
        }
    }

  return NULL_TREE;
}

/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1)
{
  machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.
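         Here DBL_MAX stands for the largest finite value of ARG0's type,
         obtained from real_maxval below; for ARG1 == -Inf the test is
         mirrored, e.g. x == -Inf becomes x < -DBL_MAX.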
*/ 6629 real_maxval (&max, neg, mode); 6630 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, 6631 arg0, build_real (TREE_TYPE (arg0), max)); 6632 6633 case LT_EXPR: 6634 /* x < +Inf is always equal to x <= DBL_MAX. */ 6635 real_maxval (&max, neg, mode); 6636 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, 6637 arg0, build_real (TREE_TYPE (arg0), max)); 6638 6639 case NE_EXPR: 6640 /* x != +Inf is always equal to !(x > DBL_MAX). */ 6641 real_maxval (&max, neg, mode); 6642 if (! HONOR_NANS (mode)) 6643 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, 6644 arg0, build_real (TREE_TYPE (arg0), max)); 6645 6646 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, 6647 arg0, build_real (TREE_TYPE (arg0), max)); 6648 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp); 6649 6650 default: 6651 break; 6652 } 6653 6654 return NULL_TREE; 6655} 6656 6657/* Subroutine of fold() that optimizes comparisons of a division by 6658 a nonzero integer constant against an integer constant, i.e. 6659 X/C1 op C2. 6660 6661 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, 6662 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 6663 are the operands of the comparison. ARG1 must be a TREE_REAL_CST. 6664 6665 The function returns the constant folded tree if a simplification 6666 can be made, and NULL_TREE otherwise. */ 6667 6668static tree 6669fold_div_compare (location_t loc, 6670 enum tree_code code, tree type, tree arg0, tree arg1) 6671{ 6672 tree prod, tmp, hi, lo; 6673 tree arg00 = TREE_OPERAND (arg0, 0); 6674 tree arg01 = TREE_OPERAND (arg0, 1); 6675 signop sign = TYPE_SIGN (TREE_TYPE (arg0)); 6676 bool neg_overflow = false; 6677 bool overflow; 6678 6679 /* We have to do this the hard way to detect unsigned overflow. 6680 prod = int_const_binop (MULT_EXPR, arg01, arg1); */ 6681 wide_int val = wi::mul (arg01, arg1, sign, &overflow); 6682 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow); 6683 neg_overflow = false; 6684 6685 if (sign == UNSIGNED) 6686 { 6687 tmp = int_const_binop (MINUS_EXPR, arg01, 6688 build_int_cst (TREE_TYPE (arg01), 1)); 6689 lo = prod; 6690 6691 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */ 6692 val = wi::add (prod, tmp, sign, &overflow); 6693 hi = force_fit_type (TREE_TYPE (arg00), val, 6694 -1, overflow | TREE_OVERFLOW (prod)); 6695 } 6696 else if (tree_int_cst_sgn (arg01) >= 0) 6697 { 6698 tmp = int_const_binop (MINUS_EXPR, arg01, 6699 build_int_cst (TREE_TYPE (arg01), 1)); 6700 switch (tree_int_cst_sgn (arg1)) 6701 { 6702 case -1: 6703 neg_overflow = true; 6704 lo = int_const_binop (MINUS_EXPR, prod, tmp); 6705 hi = prod; 6706 break; 6707 6708 case 0: 6709 lo = fold_negate_const (tmp, TREE_TYPE (arg0)); 6710 hi = tmp; 6711 break; 6712 6713 case 1: 6714 hi = int_const_binop (PLUS_EXPR, prod, tmp); 6715 lo = prod; 6716 break; 6717 6718 default: 6719 gcc_unreachable (); 6720 } 6721 } 6722 else 6723 { 6724 /* A negative divisor reverses the relational operators. 
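         For instance, with truncating division x / -3 == 2 holds exactly
         for x in [-8, -6], so the equality becomes that range check, and
         ordering tests have their direction swapped first because dividing
         by a negative constant reverses the order.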
*/ 6725 code = swap_tree_comparison (code); 6726 6727 tmp = int_const_binop (PLUS_EXPR, arg01, 6728 build_int_cst (TREE_TYPE (arg01), 1)); 6729 switch (tree_int_cst_sgn (arg1)) 6730 { 6731 case -1: 6732 hi = int_const_binop (MINUS_EXPR, prod, tmp); 6733 lo = prod; 6734 break; 6735 6736 case 0: 6737 hi = fold_negate_const (tmp, TREE_TYPE (arg0)); 6738 lo = tmp; 6739 break; 6740 6741 case 1: 6742 neg_overflow = true; 6743 lo = int_const_binop (PLUS_EXPR, prod, tmp); 6744 hi = prod; 6745 break; 6746 6747 default: 6748 gcc_unreachable (); 6749 } 6750 } 6751 6752 switch (code) 6753 { 6754 case EQ_EXPR: 6755 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) 6756 return omit_one_operand_loc (loc, type, integer_zero_node, arg00); 6757 if (TREE_OVERFLOW (hi)) 6758 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo); 6759 if (TREE_OVERFLOW (lo)) 6760 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi); 6761 return build_range_check (loc, type, arg00, 1, lo, hi); 6762 6763 case NE_EXPR: 6764 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) 6765 return omit_one_operand_loc (loc, type, integer_one_node, arg00); 6766 if (TREE_OVERFLOW (hi)) 6767 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo); 6768 if (TREE_OVERFLOW (lo)) 6769 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi); 6770 return build_range_check (loc, type, arg00, 0, lo, hi); 6771 6772 case LT_EXPR: 6773 if (TREE_OVERFLOW (lo)) 6774 { 6775 tmp = neg_overflow ? integer_zero_node : integer_one_node; 6776 return omit_one_operand_loc (loc, type, tmp, arg00); 6777 } 6778 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo); 6779 6780 case LE_EXPR: 6781 if (TREE_OVERFLOW (hi)) 6782 { 6783 tmp = neg_overflow ? integer_zero_node : integer_one_node; 6784 return omit_one_operand_loc (loc, type, tmp, arg00); 6785 } 6786 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi); 6787 6788 case GT_EXPR: 6789 if (TREE_OVERFLOW (hi)) 6790 { 6791 tmp = neg_overflow ? integer_one_node : integer_zero_node; 6792 return omit_one_operand_loc (loc, type, tmp, arg00); 6793 } 6794 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi); 6795 6796 case GE_EXPR: 6797 if (TREE_OVERFLOW (lo)) 6798 { 6799 tmp = neg_overflow ? integer_one_node : integer_zero_node; 6800 return omit_one_operand_loc (loc, type, tmp, arg00); 6801 } 6802 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo); 6803 6804 default: 6805 break; 6806 } 6807 6808 return NULL_TREE; 6809} 6810 6811 6812/* If CODE with arguments ARG0 and ARG1 represents a single bit 6813 equality/inequality test, then return a simplified form of the test 6814 using a sign testing. Otherwise return NULL. TYPE is the desired 6815 result type. */ 6816 6817static tree 6818fold_single_bit_test_into_sign_test (location_t loc, 6819 enum tree_code code, tree arg0, tree arg1, 6820 tree result_type) 6821{ 6822 /* If this is testing a single bit, we can optimize the test. */ 6823 if ((code == NE_EXPR || code == EQ_EXPR) 6824 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) 6825 && integer_pow2p (TREE_OPERAND (arg0, 1))) 6826 { 6827 /* If we have (A & C) != 0 where C is the sign bit of A, convert 6828 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */ 6829 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); 6830 6831 if (arg00 != NULL_TREE 6832 /* This is only a win if casting to a signed type is cheap, 6833 i.e. when arg00's type is not a partial mode. 
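             For example, with a 32-bit unsigned X the test
             (X & 0x80000000) != 0 becomes (int) X < 0, and
             (X & 0x80000000) == 0 becomes (int) X >= 0.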
*/ 6834 && TYPE_PRECISION (TREE_TYPE (arg00)) 6835 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00)))) 6836 { 6837 tree stype = signed_type_for (TREE_TYPE (arg00)); 6838 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, 6839 result_type, 6840 fold_convert_loc (loc, stype, arg00), 6841 build_int_cst (stype, 0)); 6842 } 6843 } 6844 6845 return NULL_TREE; 6846} 6847 6848/* If CODE with arguments ARG0 and ARG1 represents a single bit 6849 equality/inequality test, then return a simplified form of 6850 the test using shifts and logical operations. Otherwise return 6851 NULL. TYPE is the desired result type. */ 6852 6853tree 6854fold_single_bit_test (location_t loc, enum tree_code code, 6855 tree arg0, tree arg1, tree result_type) 6856{ 6857 /* If this is testing a single bit, we can optimize the test. */ 6858 if ((code == NE_EXPR || code == EQ_EXPR) 6859 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) 6860 && integer_pow2p (TREE_OPERAND (arg0, 1))) 6861 { 6862 tree inner = TREE_OPERAND (arg0, 0); 6863 tree type = TREE_TYPE (arg0); 6864 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1)); 6865 machine_mode operand_mode = TYPE_MODE (type); 6866 int ops_unsigned; 6867 tree signed_type, unsigned_type, intermediate_type; 6868 tree tem, one; 6869 6870 /* First, see if we can fold the single bit test into a sign-bit 6871 test. */ 6872 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, 6873 result_type); 6874 if (tem) 6875 return tem; 6876 6877 /* Otherwise we have (A & C) != 0 where C is a single bit, 6878 convert that into ((A >> C2) & 1). Where C2 = log2(C). 6879 Similarly for (A & C) == 0. */ 6880 6881 /* If INNER is a right shift of a constant and it plus BITNUM does 6882 not overflow, adjust BITNUM and INNER. */ 6883 if (TREE_CODE (inner) == RSHIFT_EXPR 6884 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST 6885 && bitnum < TYPE_PRECISION (type) 6886 && wi::ltu_p (TREE_OPERAND (inner, 1), 6887 TYPE_PRECISION (type) - bitnum)) 6888 { 6889 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1)); 6890 inner = TREE_OPERAND (inner, 0); 6891 } 6892 6893 /* If we are going to be able to omit the AND below, we must do our 6894 operations as unsigned. If we must use the AND, we have a choice. 6895 Normally unsigned is faster, but for some machines signed is. */ 6896#ifdef LOAD_EXTEND_OP 6897 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND 6898 && !flag_syntax_only) ? 0 : 1; 6899#else 6900 ops_unsigned = 1; 6901#endif 6902 6903 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0); 6904 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1); 6905 intermediate_type = ops_unsigned ? unsigned_type : signed_type; 6906 inner = fold_convert_loc (loc, intermediate_type, inner); 6907 6908 if (bitnum != 0) 6909 inner = build2 (RSHIFT_EXPR, intermediate_type, 6910 inner, size_int (bitnum)); 6911 6912 one = build_int_cst (intermediate_type, 1); 6913 6914 if (code == EQ_EXPR) 6915 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one); 6916 6917 /* Put the AND last so it can combine with more things. */ 6918 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one); 6919 6920 /* Make sure to return the proper type. */ 6921 inner = fold_convert_loc (loc, result_type, inner); 6922 6923 return inner; 6924 } 6925 return NULL_TREE; 6926} 6927 6928/* Check whether we are allowed to reorder operands arg0 and arg1, 6929 such that the evaluation of arg1 occurs before arg0. 
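   When flag_evaluation_order is set, reordering is refused only if
   neither operand is a constant and at least one of them has side
   effects; otherwise it is always allowed.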
*/ 6930 6931static bool 6932reorder_operands_p (const_tree arg0, const_tree arg1) 6933{ 6934 if (! flag_evaluation_order) 6935 return true; 6936 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1)) 6937 return true; 6938 return ! TREE_SIDE_EFFECTS (arg0) 6939 && ! TREE_SIDE_EFFECTS (arg1); 6940} 6941 6942/* Test whether it is preferable two swap two operands, ARG0 and 6943 ARG1, for example because ARG0 is an integer constant and ARG1 6944 isn't. If REORDER is true, only recommend swapping if we can 6945 evaluate the operands in reverse order. */ 6946 6947bool 6948tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder) 6949{ 6950 if (CONSTANT_CLASS_P (arg1)) 6951 return 0; 6952 if (CONSTANT_CLASS_P (arg0)) 6953 return 1; 6954 6955 STRIP_NOPS (arg0); 6956 STRIP_NOPS (arg1); 6957 6958 if (TREE_CONSTANT (arg1)) 6959 return 0; 6960 if (TREE_CONSTANT (arg0)) 6961 return 1; 6962 6963 if (reorder && flag_evaluation_order 6964 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))) 6965 return 0; 6966 6967 /* It is preferable to swap two SSA_NAME to ensure a canonical form 6968 for commutative and comparison operators. Ensuring a canonical 6969 form allows the optimizers to find additional redundancies without 6970 having to explicitly check for both orderings. */ 6971 if (TREE_CODE (arg0) == SSA_NAME 6972 && TREE_CODE (arg1) == SSA_NAME 6973 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1)) 6974 return 1; 6975 6976 /* Put SSA_NAMEs last. */ 6977 if (TREE_CODE (arg1) == SSA_NAME) 6978 return 0; 6979 if (TREE_CODE (arg0) == SSA_NAME) 6980 return 1; 6981 6982 /* Put variables last. */ 6983 if (DECL_P (arg1)) 6984 return 0; 6985 if (DECL_P (arg0)) 6986 return 1; 6987 6988 return 0; 6989} 6990 6991/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where 6992 ARG0 is extended to a wider type. */ 6993 6994static tree 6995fold_widened_comparison (location_t loc, enum tree_code code, 6996 tree type, tree arg0, tree arg1) 6997{ 6998 tree arg0_unw = get_unwidened (arg0, NULL_TREE); 6999 tree arg1_unw; 7000 tree shorter_type, outer_type; 7001 tree min, max; 7002 bool above, below; 7003 7004 if (arg0_unw == arg0) 7005 return NULL_TREE; 7006 shorter_type = TREE_TYPE (arg0_unw); 7007 7008#ifdef HAVE_canonicalize_funcptr_for_compare 7009 /* Disable this optimization if we're casting a function pointer 7010 type on targets that require function pointer canonicalization. */ 7011 if (HAVE_canonicalize_funcptr_for_compare 7012 && TREE_CODE (shorter_type) == POINTER_TYPE 7013 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE) 7014 return NULL_TREE; 7015#endif 7016 7017 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type)) 7018 return NULL_TREE; 7019 7020 arg1_unw = get_unwidened (arg1, NULL_TREE); 7021 7022 /* If possible, express the comparison in the shorter mode. 
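
     For instance, if C has type unsigned char, (int) C == 200 is folded
     back to the narrower C == 200, while (int) C < 300 folds to a
     constant true outright (300 lies above the range of unsigned char),
     preserving any side effects of the operand.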
*/ 7023 if ((code == EQ_EXPR || code == NE_EXPR 7024 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type)) 7025 && (TREE_TYPE (arg1_unw) == shorter_type 7026 || ((TYPE_PRECISION (shorter_type) 7027 >= TYPE_PRECISION (TREE_TYPE (arg1_unw))) 7028 && (TYPE_UNSIGNED (shorter_type) 7029 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw)))) 7030 || (TREE_CODE (arg1_unw) == INTEGER_CST 7031 && (TREE_CODE (shorter_type) == INTEGER_TYPE 7032 || TREE_CODE (shorter_type) == BOOLEAN_TYPE) 7033 && int_fits_type_p (arg1_unw, shorter_type)))) 7034 return fold_build2_loc (loc, code, type, arg0_unw, 7035 fold_convert_loc (loc, shorter_type, arg1_unw)); 7036 7037 if (TREE_CODE (arg1_unw) != INTEGER_CST 7038 || TREE_CODE (shorter_type) != INTEGER_TYPE 7039 || !int_fits_type_p (arg1_unw, shorter_type)) 7040 return NULL_TREE; 7041 7042 /* If we are comparing with the integer that does not fit into the range 7043 of the shorter type, the result is known. */ 7044 outer_type = TREE_TYPE (arg1_unw); 7045 min = lower_bound_in_type (outer_type, shorter_type); 7046 max = upper_bound_in_type (outer_type, shorter_type); 7047 7048 above = integer_nonzerop (fold_relational_const (LT_EXPR, type, 7049 max, arg1_unw)); 7050 below = integer_nonzerop (fold_relational_const (LT_EXPR, type, 7051 arg1_unw, min)); 7052 7053 switch (code) 7054 { 7055 case EQ_EXPR: 7056 if (above || below) 7057 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 7058 break; 7059 7060 case NE_EXPR: 7061 if (above || below) 7062 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 7063 break; 7064 7065 case LT_EXPR: 7066 case LE_EXPR: 7067 if (above) 7068 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 7069 else if (below) 7070 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 7071 7072 case GT_EXPR: 7073 case GE_EXPR: 7074 if (above) 7075 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 7076 else if (below) 7077 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 7078 7079 default: 7080 break; 7081 } 7082 7083 return NULL_TREE; 7084} 7085 7086/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for 7087 ARG0 just the signedness is changed. */ 7088 7089static tree 7090fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type, 7091 tree arg0, tree arg1) 7092{ 7093 tree arg0_inner; 7094 tree inner_type, outer_type; 7095 7096 if (!CONVERT_EXPR_P (arg0)) 7097 return NULL_TREE; 7098 7099 outer_type = TREE_TYPE (arg0); 7100 arg0_inner = TREE_OPERAND (arg0, 0); 7101 inner_type = TREE_TYPE (arg0_inner); 7102 7103#ifdef HAVE_canonicalize_funcptr_for_compare 7104 /* Disable this optimization if we're casting a function pointer 7105 type on targets that require function pointer canonicalization. 
*/ 7106 if (HAVE_canonicalize_funcptr_for_compare 7107 && TREE_CODE (inner_type) == POINTER_TYPE 7108 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE) 7109 return NULL_TREE; 7110#endif 7111 7112 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type)) 7113 return NULL_TREE; 7114 7115 if (TREE_CODE (arg1) != INTEGER_CST 7116 && !(CONVERT_EXPR_P (arg1) 7117 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)) 7118 return NULL_TREE; 7119 7120 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type) 7121 && code != NE_EXPR 7122 && code != EQ_EXPR) 7123 return NULL_TREE; 7124 7125 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type)) 7126 return NULL_TREE; 7127 7128 if (TREE_CODE (arg1) == INTEGER_CST) 7129 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0, 7130 TREE_OVERFLOW (arg1)); 7131 else 7132 arg1 = fold_convert_loc (loc, inner_type, arg1); 7133 7134 return fold_build2_loc (loc, code, type, arg0_inner, arg1); 7135} 7136 7137 7138/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y 7139 means A >= Y && A != MAX, but in this case we know that 7140 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */ 7141 7142static tree 7143fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound) 7144{ 7145 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y; 7146 7147 if (TREE_CODE (bound) == LT_EXPR) 7148 a = TREE_OPERAND (bound, 0); 7149 else if (TREE_CODE (bound) == GT_EXPR) 7150 a = TREE_OPERAND (bound, 1); 7151 else 7152 return NULL_TREE; 7153 7154 typea = TREE_TYPE (a); 7155 if (!INTEGRAL_TYPE_P (typea) 7156 && !POINTER_TYPE_P (typea)) 7157 return NULL_TREE; 7158 7159 if (TREE_CODE (ineq) == LT_EXPR) 7160 { 7161 a1 = TREE_OPERAND (ineq, 1); 7162 y = TREE_OPERAND (ineq, 0); 7163 } 7164 else if (TREE_CODE (ineq) == GT_EXPR) 7165 { 7166 a1 = TREE_OPERAND (ineq, 0); 7167 y = TREE_OPERAND (ineq, 1); 7168 } 7169 else 7170 return NULL_TREE; 7171 7172 if (TREE_TYPE (a1) != typea) 7173 return NULL_TREE; 7174 7175 if (POINTER_TYPE_P (typea)) 7176 { 7177 /* Convert the pointer types into integer before taking the difference. */ 7178 tree ta = fold_convert_loc (loc, ssizetype, a); 7179 tree ta1 = fold_convert_loc (loc, ssizetype, a1); 7180 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta); 7181 } 7182 else 7183 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a); 7184 7185 if (!diff || !integer_onep (diff)) 7186 return NULL_TREE; 7187 7188 return fold_build2_loc (loc, GE_EXPR, type, a, y); 7189} 7190 7191/* Fold a sum or difference of at least one multiplication. 7192 Returns the folded tree or NULL if no simplification could be made. */ 7193 7194static tree 7195fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type, 7196 tree arg0, tree arg1) 7197{ 7198 tree arg00, arg01, arg10, arg11; 7199 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same; 7200 7201 /* (A * C) +- (B * C) -> (A+-B) * C. 7202 (A * C) +- A -> A * (C+-1). 7203 We are most concerned about the case where C is a constant, 7204 but other combinations show up during loop reduction. Since 7205 it is not difficult, try all four possibilities. */ 7206 7207 if (TREE_CODE (arg0) == MULT_EXPR) 7208 { 7209 arg00 = TREE_OPERAND (arg0, 0); 7210 arg01 = TREE_OPERAND (arg0, 1); 7211 } 7212 else if (TREE_CODE (arg0) == INTEGER_CST) 7213 { 7214 arg00 = build_one_cst (type); 7215 arg01 = arg0; 7216 } 7217 else 7218 { 7219 /* We cannot generate constant 1 for fract. 
*/ 7220 if (ALL_FRACT_MODE_P (TYPE_MODE (type))) 7221 return NULL_TREE; 7222 arg00 = arg0; 7223 arg01 = build_one_cst (type); 7224 } 7225 if (TREE_CODE (arg1) == MULT_EXPR) 7226 { 7227 arg10 = TREE_OPERAND (arg1, 0); 7228 arg11 = TREE_OPERAND (arg1, 1); 7229 } 7230 else if (TREE_CODE (arg1) == INTEGER_CST) 7231 { 7232 arg10 = build_one_cst (type); 7233 /* As we canonicalize A - 2 to A + -2 get rid of that sign for 7234 the purpose of this canonicalization. */ 7235 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1))) 7236 && negate_expr_p (arg1) 7237 && code == PLUS_EXPR) 7238 { 7239 arg11 = negate_expr (arg1); 7240 code = MINUS_EXPR; 7241 } 7242 else 7243 arg11 = arg1; 7244 } 7245 else 7246 { 7247 /* We cannot generate constant 1 for fract. */ 7248 if (ALL_FRACT_MODE_P (TYPE_MODE (type))) 7249 return NULL_TREE; 7250 arg10 = arg1; 7251 arg11 = build_one_cst (type); 7252 } 7253 same = NULL_TREE; 7254 7255 if (operand_equal_p (arg01, arg11, 0)) 7256 same = arg01, alt0 = arg00, alt1 = arg10; 7257 else if (operand_equal_p (arg00, arg10, 0)) 7258 same = arg00, alt0 = arg01, alt1 = arg11; 7259 else if (operand_equal_p (arg00, arg11, 0)) 7260 same = arg00, alt0 = arg01, alt1 = arg10; 7261 else if (operand_equal_p (arg01, arg10, 0)) 7262 same = arg01, alt0 = arg00, alt1 = arg11; 7263 7264 /* No identical multiplicands; see if we can find a common 7265 power-of-two factor in non-power-of-two multiplies. This 7266 can help in multi-dimensional array access. */ 7267 else if (tree_fits_shwi_p (arg01) 7268 && tree_fits_shwi_p (arg11)) 7269 { 7270 HOST_WIDE_INT int01, int11, tmp; 7271 bool swap = false; 7272 tree maybe_same; 7273 int01 = tree_to_shwi (arg01); 7274 int11 = tree_to_shwi (arg11); 7275 7276 /* Move min of absolute values to int11. */ 7277 if (absu_hwi (int01) < absu_hwi (int11)) 7278 { 7279 tmp = int01, int01 = int11, int11 = tmp; 7280 alt0 = arg00, arg00 = arg10, arg10 = alt0; 7281 maybe_same = arg01; 7282 swap = true; 7283 } 7284 else 7285 maybe_same = arg11; 7286 7287 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0 7288 /* The remainder should not be a constant, otherwise we 7289 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has 7290 increased the number of multiplications necessary. */ 7291 && TREE_CODE (arg10) != INTEGER_CST) 7292 { 7293 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00, 7294 build_int_cst (TREE_TYPE (arg00), 7295 int01 / int11)); 7296 alt1 = arg10; 7297 same = maybe_same; 7298 if (swap) 7299 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same; 7300 } 7301 } 7302 7303 if (same) 7304 return fold_build2_loc (loc, MULT_EXPR, type, 7305 fold_build2_loc (loc, code, type, 7306 fold_convert_loc (loc, type, alt0), 7307 fold_convert_loc (loc, type, alt1)), 7308 fold_convert_loc (loc, type, same)); 7309 7310 return NULL_TREE; 7311} 7312 7313/* Subroutine of native_encode_expr. Encode the INTEGER_CST 7314 specified by EXPR into the buffer PTR of length LEN bytes. 7315 Return the number of bytes placed in the buffer, or zero 7316 upon failure. 
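   As an informal illustration (a standalone sketch, not the code below,
   with val standing in for the constant's value), the per-byte extraction
   for a little-endian target with 8-bit bytes amounts to
       unsigned long long val = 0x1234;
       unsigned char buf[sizeof val];
       for (unsigned byte = 0; byte < sizeof val; byte++)
         buf[byte] = (unsigned char) (val >> (byte * 8));
   The real routine below additionally honours BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN, works on the arbitrary-precision wide-int value,
   and restricts the output to the OFF/LEN window.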
*/ 7317 7318static int 7319native_encode_int (const_tree expr, unsigned char *ptr, int len, int off) 7320{ 7321 tree type = TREE_TYPE (expr); 7322 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7323 int byte, offset, word, words; 7324 unsigned char value; 7325 7326 if ((off == -1 && total_bytes > len) 7327 || off >= total_bytes) 7328 return 0; 7329 if (off == -1) 7330 off = 0; 7331 words = total_bytes / UNITS_PER_WORD; 7332 7333 for (byte = 0; byte < total_bytes; byte++) 7334 { 7335 int bitpos = byte * BITS_PER_UNIT; 7336 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole 7337 number of bytes. */ 7338 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT); 7339 7340 if (total_bytes > UNITS_PER_WORD) 7341 { 7342 word = byte / UNITS_PER_WORD; 7343 if (WORDS_BIG_ENDIAN) 7344 word = (words - 1) - word; 7345 offset = word * UNITS_PER_WORD; 7346 if (BYTES_BIG_ENDIAN) 7347 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7348 else 7349 offset += byte % UNITS_PER_WORD; 7350 } 7351 else 7352 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 7353 if (offset >= off 7354 && offset - off < len) 7355 ptr[offset - off] = value; 7356 } 7357 return MIN (len, total_bytes - off); 7358} 7359 7360 7361/* Subroutine of native_encode_expr. Encode the FIXED_CST 7362 specified by EXPR into the buffer PTR of length LEN bytes. 7363 Return the number of bytes placed in the buffer, or zero 7364 upon failure. */ 7365 7366static int 7367native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off) 7368{ 7369 tree type = TREE_TYPE (expr); 7370 machine_mode mode = TYPE_MODE (type); 7371 int total_bytes = GET_MODE_SIZE (mode); 7372 FIXED_VALUE_TYPE value; 7373 tree i_value, i_type; 7374 7375 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT) 7376 return 0; 7377 7378 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1); 7379 7380 if (NULL_TREE == i_type 7381 || TYPE_PRECISION (i_type) != total_bytes) 7382 return 0; 7383 7384 value = TREE_FIXED_CST (expr); 7385 i_value = double_int_to_tree (i_type, value.data); 7386 7387 return native_encode_int (i_value, ptr, len, off); 7388} 7389 7390 7391/* Subroutine of native_encode_expr. Encode the REAL_CST 7392 specified by EXPR into the buffer PTR of length LEN bytes. 7393 Return the number of bytes placed in the buffer, or zero 7394 upon failure. */ 7395 7396static int 7397native_encode_real (const_tree expr, unsigned char *ptr, int len, int off) 7398{ 7399 tree type = TREE_TYPE (expr); 7400 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7401 int byte, offset, word, words, bitpos; 7402 unsigned char value; 7403 7404 /* There are always 32 bits in each long, no matter the size of 7405 the hosts long. We handle floating point representations with 7406 up to 192 bits. 
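   (Six 32-bit groups give 192 bits, which covers even the largest binary
   formats handled here, such as the 128-bit IEEE quad and the padded
   80-bit extended format.)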
*/ 7407 long tmp[6]; 7408 7409 if ((off == -1 && total_bytes > len) 7410 || off >= total_bytes) 7411 return 0; 7412 if (off == -1) 7413 off = 0; 7414 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD; 7415 7416 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type)); 7417 7418 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 7419 bitpos += BITS_PER_UNIT) 7420 { 7421 byte = (bitpos / BITS_PER_UNIT) & 3; 7422 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31)); 7423 7424 if (UNITS_PER_WORD < 4) 7425 { 7426 word = byte / UNITS_PER_WORD; 7427 if (WORDS_BIG_ENDIAN) 7428 word = (words - 1) - word; 7429 offset = word * UNITS_PER_WORD; 7430 if (BYTES_BIG_ENDIAN) 7431 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7432 else 7433 offset += byte % UNITS_PER_WORD; 7434 } 7435 else 7436 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; 7437 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3); 7438 if (offset >= off 7439 && offset - off < len) 7440 ptr[offset - off] = value; 7441 } 7442 return MIN (len, total_bytes - off); 7443} 7444 7445/* Subroutine of native_encode_expr. Encode the COMPLEX_CST 7446 specified by EXPR into the buffer PTR of length LEN bytes. 7447 Return the number of bytes placed in the buffer, or zero 7448 upon failure. */ 7449 7450static int 7451native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off) 7452{ 7453 int rsize, isize; 7454 tree part; 7455 7456 part = TREE_REALPART (expr); 7457 rsize = native_encode_expr (part, ptr, len, off); 7458 if (off == -1 7459 && rsize == 0) 7460 return 0; 7461 part = TREE_IMAGPART (expr); 7462 if (off != -1) 7463 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part)))); 7464 isize = native_encode_expr (part, ptr+rsize, len-rsize, off); 7465 if (off == -1 7466 && isize != rsize) 7467 return 0; 7468 return rsize + isize; 7469} 7470 7471 7472/* Subroutine of native_encode_expr. Encode the VECTOR_CST 7473 specified by EXPR into the buffer PTR of length LEN bytes. 7474 Return the number of bytes placed in the buffer, or zero 7475 upon failure. */ 7476 7477static int 7478native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off) 7479{ 7480 unsigned i, count; 7481 int size, offset; 7482 tree itype, elem; 7483 7484 offset = 0; 7485 count = VECTOR_CST_NELTS (expr); 7486 itype = TREE_TYPE (TREE_TYPE (expr)); 7487 size = GET_MODE_SIZE (TYPE_MODE (itype)); 7488 for (i = 0; i < count; i++) 7489 { 7490 if (off >= size) 7491 { 7492 off -= size; 7493 continue; 7494 } 7495 elem = VECTOR_CST_ELT (expr, i); 7496 int res = native_encode_expr (elem, ptr+offset, len-offset, off); 7497 if ((off == -1 && res != size) 7498 || res == 0) 7499 return 0; 7500 offset += res; 7501 if (offset >= len) 7502 return offset; 7503 if (off != -1) 7504 off = 0; 7505 } 7506 return offset; 7507} 7508 7509 7510/* Subroutine of native_encode_expr. Encode the STRING_CST 7511 specified by EXPR into the buffer PTR of length LEN bytes. 7512 Return the number of bytes placed in the buffer, or zero 7513 upon failure. 
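   For example, encoding the STRING_CST initializer of char buf[8] = "hi"
   yields the bytes 68 69 00 00 00 00 00 00: everything past
   TREE_STRING_LENGTH is zero-filled up to the array size.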
*/ 7514 7515static int 7516native_encode_string (const_tree expr, unsigned char *ptr, int len, int off) 7517{ 7518 tree type = TREE_TYPE (expr); 7519 HOST_WIDE_INT total_bytes; 7520 7521 if (TREE_CODE (type) != ARRAY_TYPE 7522 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE 7523 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT 7524 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type))) 7525 return 0; 7526 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type)); 7527 if ((off == -1 && total_bytes > len) 7528 || off >= total_bytes) 7529 return 0; 7530 if (off == -1) 7531 off = 0; 7532 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len)) 7533 { 7534 int written = 0; 7535 if (off < TREE_STRING_LENGTH (expr)) 7536 { 7537 written = MIN (len, TREE_STRING_LENGTH (expr) - off); 7538 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written); 7539 } 7540 memset (ptr + written, 0, 7541 MIN (total_bytes - written, len - written)); 7542 } 7543 else 7544 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len)); 7545 return MIN (total_bytes - off, len); 7546} 7547 7548 7549/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, 7550 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the 7551 buffer PTR of length LEN bytes. If OFF is not -1 then start 7552 the encoding at byte offset OFF and encode at most LEN bytes. 7553 Return the number of bytes placed in the buffer, or zero upon failure. */ 7554 7555int 7556native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off) 7557{ 7558 /* We don't support starting at negative offset and -1 is special. */ 7559 if (off < -1) 7560 return 0; 7561 7562 switch (TREE_CODE (expr)) 7563 { 7564 case INTEGER_CST: 7565 return native_encode_int (expr, ptr, len, off); 7566 7567 case REAL_CST: 7568 return native_encode_real (expr, ptr, len, off); 7569 7570 case FIXED_CST: 7571 return native_encode_fixed (expr, ptr, len, off); 7572 7573 case COMPLEX_CST: 7574 return native_encode_complex (expr, ptr, len, off); 7575 7576 case VECTOR_CST: 7577 return native_encode_vector (expr, ptr, len, off); 7578 7579 case STRING_CST: 7580 return native_encode_string (expr, ptr, len, off); 7581 7582 default: 7583 return 0; 7584 } 7585} 7586 7587 7588/* Subroutine of native_interpret_expr. Interpret the contents of 7589 the buffer PTR of length LEN as an INTEGER_CST of type TYPE. 7590 If the buffer cannot be interpreted, return NULL_TREE. */ 7591 7592static tree 7593native_interpret_int (tree type, const unsigned char *ptr, int len) 7594{ 7595 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7596 7597 if (total_bytes > len 7598 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT) 7599 return NULL_TREE; 7600 7601 wide_int result = wi::from_buffer (ptr, total_bytes); 7602 7603 return wide_int_to_tree (type, result); 7604} 7605 7606 7607/* Subroutine of native_interpret_expr. Interpret the contents of 7608 the buffer PTR of length LEN as a FIXED_CST of type TYPE. 7609 If the buffer cannot be interpreted, return NULL_TREE. 
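   This is the inverse of native_encode_fixed: the bytes are read back as
   the underlying integer payload and then reinterpreted in the type's
   fixed-point mode.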
*/ 7610 7611static tree 7612native_interpret_fixed (tree type, const unsigned char *ptr, int len) 7613{ 7614 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7615 double_int result; 7616 FIXED_VALUE_TYPE fixed_value; 7617 7618 if (total_bytes > len 7619 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT) 7620 return NULL_TREE; 7621 7622 result = double_int::from_buffer (ptr, total_bytes); 7623 fixed_value = fixed_from_double_int (result, TYPE_MODE (type)); 7624 7625 return build_fixed (type, fixed_value); 7626} 7627 7628 7629/* Subroutine of native_interpret_expr. Interpret the contents of 7630 the buffer PTR of length LEN as a REAL_CST of type TYPE. 7631 If the buffer cannot be interpreted, return NULL_TREE. */ 7632 7633static tree 7634native_interpret_real (tree type, const unsigned char *ptr, int len) 7635{ 7636 machine_mode mode = TYPE_MODE (type); 7637 int total_bytes = GET_MODE_SIZE (mode); 7638 int byte, offset, word, words, bitpos; 7639 unsigned char value; 7640 /* There are always 32 bits in each long, no matter the size of 7641 the hosts long. We handle floating point representations with 7642 up to 192 bits. */ 7643 REAL_VALUE_TYPE r; 7644 long tmp[6]; 7645 7646 total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7647 if (total_bytes > len || total_bytes > 24) 7648 return NULL_TREE; 7649 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD; 7650 7651 memset (tmp, 0, sizeof (tmp)); 7652 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 7653 bitpos += BITS_PER_UNIT) 7654 { 7655 byte = (bitpos / BITS_PER_UNIT) & 3; 7656 if (UNITS_PER_WORD < 4) 7657 { 7658 word = byte / UNITS_PER_WORD; 7659 if (WORDS_BIG_ENDIAN) 7660 word = (words - 1) - word; 7661 offset = word * UNITS_PER_WORD; 7662 if (BYTES_BIG_ENDIAN) 7663 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7664 else 7665 offset += byte % UNITS_PER_WORD; 7666 } 7667 else 7668 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; 7669 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)]; 7670 7671 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31); 7672 } 7673 7674 real_from_target (&r, tmp, mode); 7675 return build_real (type, r); 7676} 7677 7678 7679/* Subroutine of native_interpret_expr. Interpret the contents of 7680 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE. 7681 If the buffer cannot be interpreted, return NULL_TREE. */ 7682 7683static tree 7684native_interpret_complex (tree type, const unsigned char *ptr, int len) 7685{ 7686 tree etype, rpart, ipart; 7687 int size; 7688 7689 etype = TREE_TYPE (type); 7690 size = GET_MODE_SIZE (TYPE_MODE (etype)); 7691 if (size * 2 > len) 7692 return NULL_TREE; 7693 rpart = native_interpret_expr (etype, ptr, size); 7694 if (!rpart) 7695 return NULL_TREE; 7696 ipart = native_interpret_expr (etype, ptr+size, size); 7697 if (!ipart) 7698 return NULL_TREE; 7699 return build_complex (type, rpart, ipart); 7700} 7701 7702 7703/* Subroutine of native_interpret_expr. Interpret the contents of 7704 the buffer PTR of length LEN as a VECTOR_CST of type TYPE. 7705 If the buffer cannot be interpreted, return NULL_TREE. 
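   For example, with a vector type of four 32-bit elements and a 16-byte
   buffer, the elements are interpreted from bytes 0-3, 4-7, 8-11 and
   12-15 in turn.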
*/ 7706 7707static tree 7708native_interpret_vector (tree type, const unsigned char *ptr, int len) 7709{ 7710 tree etype, elem; 7711 int i, size, count; 7712 tree *elements; 7713 7714 etype = TREE_TYPE (type); 7715 size = GET_MODE_SIZE (TYPE_MODE (etype)); 7716 count = TYPE_VECTOR_SUBPARTS (type); 7717 if (size * count > len) 7718 return NULL_TREE; 7719 7720 elements = XALLOCAVEC (tree, count); 7721 for (i = count - 1; i >= 0; i--) 7722 { 7723 elem = native_interpret_expr (etype, ptr+(i*size), size); 7724 if (!elem) 7725 return NULL_TREE; 7726 elements[i] = elem; 7727 } 7728 return build_vector (type, elements); 7729} 7730 7731 7732/* Subroutine of fold_view_convert_expr. Interpret the contents of 7733 the buffer PTR of length LEN as a constant of type TYPE. For 7734 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P 7735 we return a REAL_CST, etc... If the buffer cannot be interpreted, 7736 return NULL_TREE. */ 7737 7738tree 7739native_interpret_expr (tree type, const unsigned char *ptr, int len) 7740{ 7741 switch (TREE_CODE (type)) 7742 { 7743 case INTEGER_TYPE: 7744 case ENUMERAL_TYPE: 7745 case BOOLEAN_TYPE: 7746 case POINTER_TYPE: 7747 case REFERENCE_TYPE: 7748 return native_interpret_int (type, ptr, len); 7749 7750 case REAL_TYPE: 7751 return native_interpret_real (type, ptr, len); 7752 7753 case FIXED_POINT_TYPE: 7754 return native_interpret_fixed (type, ptr, len); 7755 7756 case COMPLEX_TYPE: 7757 return native_interpret_complex (type, ptr, len); 7758 7759 case VECTOR_TYPE: 7760 return native_interpret_vector (type, ptr, len); 7761 7762 default: 7763 return NULL_TREE; 7764 } 7765} 7766 7767/* Returns true if we can interpret the contents of a native encoding 7768 as TYPE. */ 7769 7770static bool 7771can_native_interpret_type_p (tree type) 7772{ 7773 switch (TREE_CODE (type)) 7774 { 7775 case INTEGER_TYPE: 7776 case ENUMERAL_TYPE: 7777 case BOOLEAN_TYPE: 7778 case POINTER_TYPE: 7779 case REFERENCE_TYPE: 7780 case FIXED_POINT_TYPE: 7781 case REAL_TYPE: 7782 case COMPLEX_TYPE: 7783 case VECTOR_TYPE: 7784 return true; 7785 default: 7786 return false; 7787 } 7788} 7789 7790/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type 7791 TYPE at compile-time. If we're unable to perform the conversion 7792 return NULL_TREE. */ 7793 7794static tree 7795fold_view_convert_expr (tree type, tree expr) 7796{ 7797 /* We support up to 512-bit values (for V8DFmode). */ 7798 unsigned char buffer[64]; 7799 int len; 7800 7801 /* Check that the host and target are sane. */ 7802 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8) 7803 return NULL_TREE; 7804 7805 len = native_encode_expr (expr, buffer, sizeof (buffer)); 7806 if (len == 0) 7807 return NULL_TREE; 7808 7809 return native_interpret_expr (type, buffer, len); 7810} 7811 7812/* Build an expression for the address of T. Folds away INDIRECT_REF 7813 to avoid confusing the gimplify process. */ 7814 7815tree 7816build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype) 7817{ 7818 /* The size of the object is not relevant when talking about its address. 
*/ 7819 if (TREE_CODE (t) == WITH_SIZE_EXPR) 7820 t = TREE_OPERAND (t, 0); 7821 7822 if (TREE_CODE (t) == INDIRECT_REF) 7823 { 7824 t = TREE_OPERAND (t, 0); 7825 7826 if (TREE_TYPE (t) != ptrtype) 7827 t = build1_loc (loc, NOP_EXPR, ptrtype, t); 7828 } 7829 else if (TREE_CODE (t) == MEM_REF 7830 && integer_zerop (TREE_OPERAND (t, 1))) 7831 return TREE_OPERAND (t, 0); 7832 else if (TREE_CODE (t) == MEM_REF 7833 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST) 7834 return fold_binary (POINTER_PLUS_EXPR, ptrtype, 7835 TREE_OPERAND (t, 0), 7836 convert_to_ptrofftype (TREE_OPERAND (t, 1))); 7837 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR) 7838 { 7839 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0)); 7840 7841 if (TREE_TYPE (t) != ptrtype) 7842 t = fold_convert_loc (loc, ptrtype, t); 7843 } 7844 else 7845 t = build1_loc (loc, ADDR_EXPR, ptrtype, t); 7846 7847 return t; 7848} 7849 7850/* Build an expression for the address of T. */ 7851 7852tree 7853build_fold_addr_expr_loc (location_t loc, tree t) 7854{ 7855 tree ptrtype = build_pointer_type (TREE_TYPE (t)); 7856 7857 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype); 7858} 7859 7860/* Fold a unary expression of code CODE and type TYPE with operand 7861 OP0. Return the folded expression if folding is successful. 7862 Otherwise, return NULL_TREE. */ 7863 7864tree 7865fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) 7866{ 7867 tree tem; 7868 tree arg0; 7869 enum tree_code_class kind = TREE_CODE_CLASS (code); 7870 7871 gcc_assert (IS_EXPR_CODE_CLASS (kind) 7872 && TREE_CODE_LENGTH (code) == 1); 7873 7874 arg0 = op0; 7875 if (arg0) 7876 { 7877 if (CONVERT_EXPR_CODE_P (code) 7878 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR) 7879 { 7880 /* Don't use STRIP_NOPS, because signedness of argument type 7881 matters. */ 7882 STRIP_SIGN_NOPS (arg0); 7883 } 7884 else 7885 { 7886 /* Strip any conversions that don't change the mode. This 7887 is safe for every expression, except for a comparison 7888 expression because its signedness is derived from its 7889 operands. 7890 7891 Note that this is done as an internal manipulation within 7892 the constant folder, in order to find the simplest 7893 representation of the arguments so that their form can be 7894 studied. In any cases, the appropriate type conversions 7895 should be put back in the tree that will get out of the 7896 constant folder. */ 7897 STRIP_NOPS (arg0); 7898 } 7899 7900 if (CONSTANT_CLASS_P (arg0)) 7901 { 7902 tree tem = const_unop (code, type, arg0); 7903 if (tem) 7904 { 7905 if (TREE_TYPE (tem) != type) 7906 tem = fold_convert_loc (loc, type, tem); 7907 return tem; 7908 } 7909 } 7910 } 7911 7912 tem = generic_simplify (loc, code, type, op0); 7913 if (tem) 7914 return tem; 7915 7916 if (TREE_CODE_CLASS (code) == tcc_unary) 7917 { 7918 if (TREE_CODE (arg0) == COMPOUND_EXPR) 7919 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), 7920 fold_build1_loc (loc, code, type, 7921 fold_convert_loc (loc, TREE_TYPE (op0), 7922 TREE_OPERAND (arg0, 1)))); 7923 else if (TREE_CODE (arg0) == COND_EXPR) 7924 { 7925 tree arg01 = TREE_OPERAND (arg0, 1); 7926 tree arg02 = TREE_OPERAND (arg0, 2); 7927 if (! VOID_TYPE_P (TREE_TYPE (arg01))) 7928 arg01 = fold_build1_loc (loc, code, type, 7929 fold_convert_loc (loc, 7930 TREE_TYPE (op0), arg01)); 7931 if (! 
VOID_TYPE_P (TREE_TYPE (arg02))) 7932 arg02 = fold_build1_loc (loc, code, type, 7933 fold_convert_loc (loc, 7934 TREE_TYPE (op0), arg02)); 7935 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0), 7936 arg01, arg02); 7937 7938 /* If this was a conversion, and all we did was to move into 7939 inside the COND_EXPR, bring it back out. But leave it if 7940 it is a conversion from integer to integer and the 7941 result precision is no wider than a word since such a 7942 conversion is cheap and may be optimized away by combine, 7943 while it couldn't if it were outside the COND_EXPR. Then return 7944 so we don't get into an infinite recursion loop taking the 7945 conversion out and then back in. */ 7946 7947 if ((CONVERT_EXPR_CODE_P (code) 7948 || code == NON_LVALUE_EXPR) 7949 && TREE_CODE (tem) == COND_EXPR 7950 && TREE_CODE (TREE_OPERAND (tem, 1)) == code 7951 && TREE_CODE (TREE_OPERAND (tem, 2)) == code 7952 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1)) 7953 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2)) 7954 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)) 7955 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0))) 7956 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 7957 && (INTEGRAL_TYPE_P 7958 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)))) 7959 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD) 7960 || flag_syntax_only)) 7961 tem = build1_loc (loc, code, type, 7962 build3 (COND_EXPR, 7963 TREE_TYPE (TREE_OPERAND 7964 (TREE_OPERAND (tem, 1), 0)), 7965 TREE_OPERAND (tem, 0), 7966 TREE_OPERAND (TREE_OPERAND (tem, 1), 0), 7967 TREE_OPERAND (TREE_OPERAND (tem, 2), 7968 0))); 7969 return tem; 7970 } 7971 } 7972 7973 switch (code) 7974 { 7975 case NON_LVALUE_EXPR: 7976 if (!maybe_lvalue_p (op0)) 7977 return fold_convert_loc (loc, type, op0); 7978 return NULL_TREE; 7979 7980 CASE_CONVERT: 7981 case FLOAT_EXPR: 7982 case FIX_TRUNC_EXPR: 7983 if (COMPARISON_CLASS_P (op0)) 7984 { 7985 /* If we have (type) (a CMP b) and type is an integral type, return 7986 new expression involving the new type. Canonicalize 7987 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for 7988 non-integral type. 7989 Do not fold the result as that would not simplify further, also 7990 folding again results in recursions. */ 7991 if (TREE_CODE (type) == BOOLEAN_TYPE) 7992 return build2_loc (loc, TREE_CODE (op0), type, 7993 TREE_OPERAND (op0, 0), 7994 TREE_OPERAND (op0, 1)); 7995 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type) 7996 && TREE_CODE (type) != VECTOR_TYPE) 7997 return build3_loc (loc, COND_EXPR, type, op0, 7998 constant_boolean_node (true, type), 7999 constant_boolean_node (false, type)); 8000 } 8001 8002 /* Handle (T *)&A.B.C for A being of type T and B and C 8003 living at offset zero. This occurs frequently in 8004 C++ upcasting and then accessing the base. */ 8005 if (TREE_CODE (op0) == ADDR_EXPR 8006 && POINTER_TYPE_P (type) 8007 && handled_component_p (TREE_OPERAND (op0, 0))) 8008 { 8009 HOST_WIDE_INT bitsize, bitpos; 8010 tree offset; 8011 machine_mode mode; 8012 int unsignedp, volatilep; 8013 tree base = TREE_OPERAND (op0, 0); 8014 base = get_inner_reference (base, &bitsize, &bitpos, &offset, 8015 &mode, &unsignedp, &volatilep, false); 8016 /* If the reference was to a (constant) zero offset, we can use 8017 the address of the base if it has the same base type 8018 as the result type and the pointer type is unqualified. */ 8019 if (! 
offset && bitpos == 0 8020 && (TYPE_MAIN_VARIANT (TREE_TYPE (type)) 8021 == TYPE_MAIN_VARIANT (TREE_TYPE (base))) 8022 && TYPE_QUALS (type) == TYPE_UNQUALIFIED) 8023 return fold_convert_loc (loc, type, 8024 build_fold_addr_expr_loc (loc, base)); 8025 } 8026 8027 if (TREE_CODE (op0) == MODIFY_EXPR 8028 && TREE_CONSTANT (TREE_OPERAND (op0, 1)) 8029 /* Detect assigning a bitfield. */ 8030 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF 8031 && DECL_BIT_FIELD 8032 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1)))) 8033 { 8034 /* Don't leave an assignment inside a conversion 8035 unless assigning a bitfield. */ 8036 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1)); 8037 /* First do the assignment, then return converted constant. */ 8038 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem); 8039 TREE_NO_WARNING (tem) = 1; 8040 TREE_USED (tem) = 1; 8041 return tem; 8042 } 8043 8044 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer 8045 constants (if x has signed type, the sign bit cannot be set 8046 in c). This folds extension into the BIT_AND_EXPR. 8047 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they 8048 very likely don't have maximal range for their precision and this 8049 transformation effectively doesn't preserve non-maximal ranges. */ 8050 if (TREE_CODE (type) == INTEGER_TYPE 8051 && TREE_CODE (op0) == BIT_AND_EXPR 8052 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST) 8053 { 8054 tree and_expr = op0; 8055 tree and0 = TREE_OPERAND (and_expr, 0); 8056 tree and1 = TREE_OPERAND (and_expr, 1); 8057 int change = 0; 8058 8059 if (TYPE_UNSIGNED (TREE_TYPE (and_expr)) 8060 || (TYPE_PRECISION (type) 8061 <= TYPE_PRECISION (TREE_TYPE (and_expr)))) 8062 change = 1; 8063 else if (TYPE_PRECISION (TREE_TYPE (and1)) 8064 <= HOST_BITS_PER_WIDE_INT 8065 && tree_fits_uhwi_p (and1)) 8066 { 8067 unsigned HOST_WIDE_INT cst; 8068 8069 cst = tree_to_uhwi (and1); 8070 cst &= HOST_WIDE_INT_M1U 8071 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1); 8072 change = (cst == 0); 8073#ifdef LOAD_EXTEND_OP 8074 if (change 8075 && !flag_syntax_only 8076 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0))) 8077 == ZERO_EXTEND)) 8078 { 8079 tree uns = unsigned_type_for (TREE_TYPE (and0)); 8080 and0 = fold_convert_loc (loc, uns, and0); 8081 and1 = fold_convert_loc (loc, uns, and1); 8082 } 8083#endif 8084 } 8085 if (change) 8086 { 8087 tem = force_fit_type (type, wi::to_widest (and1), 0, 8088 TREE_OVERFLOW (and1)); 8089 return fold_build2_loc (loc, BIT_AND_EXPR, type, 8090 fold_convert_loc (loc, type, and0), tem); 8091 } 8092 } 8093 8094 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, 8095 when one of the new casts will fold away. Conservatively we assume 8096 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */ 8097 if (POINTER_TYPE_P (type) 8098 && TREE_CODE (arg0) == POINTER_PLUS_EXPR 8099 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0))) 8100 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 8101 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR 8102 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR)) 8103 { 8104 tree arg00 = TREE_OPERAND (arg0, 0); 8105 tree arg01 = TREE_OPERAND (arg0, 1); 8106 8107 return fold_build_pointer_plus_loc 8108 (loc, fold_convert_loc (loc, type, arg00), arg01); 8109 } 8110 8111 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types 8112 of the same precision, and X is an integer type not narrower than 8113 types T1 or T2, i.e. the cast (T2)X isn't an extension. 
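   For example, (int) ~(unsigned int) x with x of type int becomes ~x,
   since the inner cast only changes the signedness.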
*/ 8114 if (INTEGRAL_TYPE_P (type) 8115 && TREE_CODE (op0) == BIT_NOT_EXPR 8116 && INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8117 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0)) 8118 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))) 8119 { 8120 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0); 8121 if (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 8122 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem))) 8123 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 8124 fold_convert_loc (loc, type, tem)); 8125 } 8126 8127 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the 8128 type of X and Y (integer types only). */ 8129 if (INTEGRAL_TYPE_P (type) 8130 && TREE_CODE (op0) == MULT_EXPR 8131 && INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8132 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))) 8133 { 8134 /* Be careful not to introduce new overflows. */ 8135 tree mult_type; 8136 if (TYPE_OVERFLOW_WRAPS (type)) 8137 mult_type = type; 8138 else 8139 mult_type = unsigned_type_for (type); 8140 8141 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0))) 8142 { 8143 tem = fold_build2_loc (loc, MULT_EXPR, mult_type, 8144 fold_convert_loc (loc, mult_type, 8145 TREE_OPERAND (op0, 0)), 8146 fold_convert_loc (loc, mult_type, 8147 TREE_OPERAND (op0, 1))); 8148 return fold_convert_loc (loc, type, tem); 8149 } 8150 } 8151 8152 return NULL_TREE; 8153 8154 case VIEW_CONVERT_EXPR: 8155 if (TREE_CODE (op0) == MEM_REF) 8156 return fold_build2_loc (loc, MEM_REF, type, 8157 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1)); 8158 8159 return NULL_TREE; 8160 8161 case NEGATE_EXPR: 8162 tem = fold_negate_expr (loc, arg0); 8163 if (tem) 8164 return fold_convert_loc (loc, type, tem); 8165 return NULL_TREE; 8166 8167 case ABS_EXPR: 8168 /* Convert fabs((double)float) into (double)fabsf(float). */ 8169 if (TREE_CODE (arg0) == NOP_EXPR 8170 && TREE_CODE (type) == REAL_TYPE) 8171 { 8172 tree targ0 = strip_float_extensions (arg0); 8173 if (targ0 != arg0) 8174 return fold_convert_loc (loc, type, 8175 fold_build1_loc (loc, ABS_EXPR, 8176 TREE_TYPE (targ0), 8177 targ0)); 8178 } 8179 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */ 8180 else if (TREE_CODE (arg0) == ABS_EXPR) 8181 return arg0; 8182 8183 /* Strip sign ops from argument. */ 8184 if (TREE_CODE (type) == REAL_TYPE) 8185 { 8186 tem = fold_strip_sign_ops (arg0); 8187 if (tem) 8188 return fold_build1_loc (loc, ABS_EXPR, type, 8189 fold_convert_loc (loc, type, tem)); 8190 } 8191 return NULL_TREE; 8192 8193 case CONJ_EXPR: 8194 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8195 return fold_convert_loc (loc, type, arg0); 8196 if (TREE_CODE (arg0) == COMPLEX_EXPR) 8197 { 8198 tree itype = TREE_TYPE (type); 8199 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0)); 8200 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1)); 8201 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, 8202 negate_expr (ipart)); 8203 } 8204 if (TREE_CODE (arg0) == CONJ_EXPR) 8205 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 8206 return NULL_TREE; 8207 8208 case BIT_NOT_EXPR: 8209 /* Convert ~ (-A) to A - 1. */ 8210 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR) 8211 return fold_build2_loc (loc, MINUS_EXPR, type, 8212 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)), 8213 build_int_cst (type, 1)); 8214 /* Convert ~ (A - 1) or ~ (A + -1) to -A. 
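   For example, ~(x - 1) folds to -x, by the identity ~y == -y - 1.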
*/ 8215 else if (INTEGRAL_TYPE_P (type) 8216 && ((TREE_CODE (arg0) == MINUS_EXPR 8217 && integer_onep (TREE_OPERAND (arg0, 1))) 8218 || (TREE_CODE (arg0) == PLUS_EXPR 8219 && integer_all_onesp (TREE_OPERAND (arg0, 1))))) 8220 { 8221 /* Perform the negation in ARG0's type and only then convert 8222 to TYPE as to avoid introducing undefined behavior. */ 8223 tree t = fold_build1_loc (loc, NEGATE_EXPR, 8224 TREE_TYPE (TREE_OPERAND (arg0, 0)), 8225 TREE_OPERAND (arg0, 0)); 8226 return fold_convert_loc (loc, type, t); 8227 } 8228 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */ 8229 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 8230 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, 8231 fold_convert_loc (loc, type, 8232 TREE_OPERAND (arg0, 0))))) 8233 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem, 8234 fold_convert_loc (loc, type, 8235 TREE_OPERAND (arg0, 1))); 8236 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 8237 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, 8238 fold_convert_loc (loc, type, 8239 TREE_OPERAND (arg0, 1))))) 8240 return fold_build2_loc (loc, BIT_XOR_EXPR, type, 8241 fold_convert_loc (loc, type, 8242 TREE_OPERAND (arg0, 0)), tem); 8243 8244 return NULL_TREE; 8245 8246 case TRUTH_NOT_EXPR: 8247 /* Note that the operand of this must be an int 8248 and its values must be 0 or 1. 8249 ("true" is a fixed value perhaps depending on the language, 8250 but we don't handle values other than 1 correctly yet.) */ 8251 tem = fold_truth_not_expr (loc, arg0); 8252 if (!tem) 8253 return NULL_TREE; 8254 return fold_convert_loc (loc, type, tem); 8255 8256 case REALPART_EXPR: 8257 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8258 return fold_convert_loc (loc, type, arg0); 8259 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8260 { 8261 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8262 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, 8263 fold_build1_loc (loc, REALPART_EXPR, itype, 8264 TREE_OPERAND (arg0, 0)), 8265 fold_build1_loc (loc, REALPART_EXPR, itype, 8266 TREE_OPERAND (arg0, 1))); 8267 return fold_convert_loc (loc, type, tem); 8268 } 8269 if (TREE_CODE (arg0) == CONJ_EXPR) 8270 { 8271 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8272 tem = fold_build1_loc (loc, REALPART_EXPR, itype, 8273 TREE_OPERAND (arg0, 0)); 8274 return fold_convert_loc (loc, type, tem); 8275 } 8276 if (TREE_CODE (arg0) == CALL_EXPR) 8277 { 8278 tree fn = get_callee_fndecl (arg0); 8279 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) 8280 switch (DECL_FUNCTION_CODE (fn)) 8281 { 8282 CASE_FLT_FN (BUILT_IN_CEXPI): 8283 fn = mathfn_built_in (type, BUILT_IN_COS); 8284 if (fn) 8285 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); 8286 break; 8287 8288 default: 8289 break; 8290 } 8291 } 8292 return NULL_TREE; 8293 8294 case IMAGPART_EXPR: 8295 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8296 return build_zero_cst (type); 8297 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8298 { 8299 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8300 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, 8301 fold_build1_loc (loc, IMAGPART_EXPR, itype, 8302 TREE_OPERAND (arg0, 0)), 8303 fold_build1_loc (loc, IMAGPART_EXPR, itype, 8304 TREE_OPERAND (arg0, 1))); 8305 return fold_convert_loc (loc, type, tem); 8306 } 8307 if (TREE_CODE (arg0) == CONJ_EXPR) 8308 { 8309 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8310 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)); 8311 return fold_convert_loc (loc, type, negate_expr (tem)); 
8312 } 8313 if (TREE_CODE (arg0) == CALL_EXPR) 8314 { 8315 tree fn = get_callee_fndecl (arg0); 8316 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) 8317 switch (DECL_FUNCTION_CODE (fn)) 8318 { 8319 CASE_FLT_FN (BUILT_IN_CEXPI): 8320 fn = mathfn_built_in (type, BUILT_IN_SIN); 8321 if (fn) 8322 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); 8323 break; 8324 8325 default: 8326 break; 8327 } 8328 } 8329 return NULL_TREE; 8330 8331 case INDIRECT_REF: 8332 /* Fold *&X to X if X is an lvalue. */ 8333 if (TREE_CODE (op0) == ADDR_EXPR) 8334 { 8335 tree op00 = TREE_OPERAND (op0, 0); 8336 if ((TREE_CODE (op00) == VAR_DECL 8337 || TREE_CODE (op00) == PARM_DECL 8338 || TREE_CODE (op00) == RESULT_DECL) 8339 && !TREE_READONLY (op00)) 8340 return op00; 8341 } 8342 return NULL_TREE; 8343 8344 default: 8345 return NULL_TREE; 8346 } /* switch (code) */ 8347} 8348 8349 8350/* If the operation was a conversion do _not_ mark a resulting constant 8351 with TREE_OVERFLOW if the original constant was not. These conversions 8352 have implementation defined behavior and retaining the TREE_OVERFLOW 8353 flag here would confuse later passes such as VRP. */ 8354tree 8355fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code, 8356 tree type, tree op0) 8357{ 8358 tree res = fold_unary_loc (loc, code, type, op0); 8359 if (res 8360 && TREE_CODE (res) == INTEGER_CST 8361 && TREE_CODE (op0) == INTEGER_CST 8362 && CONVERT_EXPR_CODE_P (code)) 8363 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0); 8364 8365 return res; 8366} 8367 8368/* Fold a binary bitwise/truth expression of code CODE and type TYPE with 8369 operands OP0 and OP1. LOC is the location of the resulting expression. 8370 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1. 8371 Return the folded expression if folding is successful. Otherwise, 8372 return NULL_TREE. */ 8373static tree 8374fold_truth_andor (location_t loc, enum tree_code code, tree type, 8375 tree arg0, tree arg1, tree op0, tree op1) 8376{ 8377 tree tem; 8378 8379 /* We only do these simplifications if we are optimizing. */ 8380 if (!optimize) 8381 return NULL_TREE; 8382 8383 /* Check for things like (A || B) && (A || C). We can convert this 8384 to A || (B && C). Note that either operator can be any of the four 8385 truth and/or operations and the transformation will still be 8386 valid. Also note that we only care about order for the 8387 ANDIF and ORIF operators. If B contains side effects, this 8388 might change the truth-value of A. */ 8389 if (TREE_CODE (arg0) == TREE_CODE (arg1) 8390 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR 8391 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR 8392 || TREE_CODE (arg0) == TRUTH_AND_EXPR 8393 || TREE_CODE (arg0) == TRUTH_OR_EXPR) 8394 && ! 
TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1))) 8395 { 8396 tree a00 = TREE_OPERAND (arg0, 0); 8397 tree a01 = TREE_OPERAND (arg0, 1); 8398 tree a10 = TREE_OPERAND (arg1, 0); 8399 tree a11 = TREE_OPERAND (arg1, 1); 8400 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR 8401 || TREE_CODE (arg0) == TRUTH_AND_EXPR) 8402 && (code == TRUTH_AND_EXPR 8403 || code == TRUTH_OR_EXPR)); 8404 8405 if (operand_equal_p (a00, a10, 0)) 8406 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00, 8407 fold_build2_loc (loc, code, type, a01, a11)); 8408 else if (commutative && operand_equal_p (a00, a11, 0)) 8409 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00, 8410 fold_build2_loc (loc, code, type, a01, a10)); 8411 else if (commutative && operand_equal_p (a01, a10, 0)) 8412 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01, 8413 fold_build2_loc (loc, code, type, a00, a11)); 8414 8415 /* This case is tricky because we must either have commutative 8416 operators or else A10 must not have side-effects. */ 8417 8418 else if ((commutative || ! TREE_SIDE_EFFECTS (a10)) 8419 && operand_equal_p (a01, a11, 0)) 8420 return fold_build2_loc (loc, TREE_CODE (arg0), type, 8421 fold_build2_loc (loc, code, type, a00, a10), 8422 a01); 8423 } 8424 8425 /* See if we can build a range comparison. */ 8426 if (0 != (tem = fold_range_test (loc, code, type, op0, op1))) 8427 return tem; 8428 8429 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR) 8430 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR)) 8431 { 8432 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true); 8433 if (tem) 8434 return fold_build2_loc (loc, code, type, tem, arg1); 8435 } 8436 8437 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR) 8438 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR)) 8439 { 8440 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false); 8441 if (tem) 8442 return fold_build2_loc (loc, code, type, arg0, tem); 8443 } 8444 8445 /* Check for the possibility of merging component references. If our 8446 lhs is another similar operation, try to merge its rhs with our 8447 rhs. Then try to merge our lhs and rhs. */ 8448 if (TREE_CODE (arg0) == code 8449 && 0 != (tem = fold_truth_andor_1 (loc, code, type, 8450 TREE_OPERAND (arg0, 1), arg1))) 8451 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 8452 8453 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0) 8454 return tem; 8455 8456 if (LOGICAL_OP_NON_SHORT_CIRCUIT 8457 && (code == TRUTH_AND_EXPR 8458 || code == TRUTH_ANDIF_EXPR 8459 || code == TRUTH_OR_EXPR 8460 || code == TRUTH_ORIF_EXPR)) 8461 { 8462 enum tree_code ncode, icode; 8463 8464 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR) 8465 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR; 8466 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR; 8467 8468 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)), 8469 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)). 8470 We don't want to pack more than two leaves into a non-IF AND/OR 8471 expression. 8472 If the tree code of the left-hand operand isn't an AND/OR-IF code and is not 8473 equal to IF-CODE, then we don't want to add the right-hand operand. 8474 If the inner right-hand side of the left-hand operand has 8475 side-effects, or isn't simple, then we can't add to it, 8476 as otherwise we might destroy the if-sequence.
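   Packing the two inner operands into one non-short-circuit operation is
   profitable on LOGICAL_OP_NON_SHORT_CIRCUIT targets because it typically
   saves a conditional branch.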
*/ 8477 if (TREE_CODE (arg0) == icode 8478 && simple_operand_p_2 (arg1) 8479 /* Needed for sequence points to handle trapping and 8480 side-effects. */ 8481 && simple_operand_p_2 (TREE_OPERAND (arg0, 1))) 8482 { 8483 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1), 8484 arg1); 8485 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0), 8486 tem); 8487 } 8488 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C), 8489 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */ 8490 else if (TREE_CODE (arg1) == icode 8491 && simple_operand_p_2 (arg0) 8492 /* Needed for sequence points to handle trapping and 8493 side-effects. */ 8494 && simple_operand_p_2 (TREE_OPERAND (arg1, 0))) 8495 { 8496 tem = fold_build2_loc (loc, ncode, type, 8497 arg0, TREE_OPERAND (arg1, 0)); 8498 return fold_build2_loc (loc, icode, type, tem, 8499 TREE_OPERAND (arg1, 1)); 8500 } 8501 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B) 8502 into (A OR B). 8503 For sequence point consistency, we need to check for trapping 8504 and side-effects. */ 8505 else if (code == icode && simple_operand_p_2 (arg0) 8506 && simple_operand_p_2 (arg1)) 8507 return fold_build2_loc (loc, ncode, type, arg0, arg1); 8508 } 8509 8510 return NULL_TREE; 8511} 8512 8513/* Fold a binary expression of code CODE and type TYPE with operands 8514 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination. 8515 Return the folded expression if folding is successful. Otherwise, 8516 return NULL_TREE. */ 8517 8518static tree 8519fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1) 8520{ 8521 enum tree_code compl_code; 8522 8523 if (code == MIN_EXPR) 8524 compl_code = MAX_EXPR; 8525 else if (code == MAX_EXPR) 8526 compl_code = MIN_EXPR; 8527 else 8528 gcc_unreachable (); 8529 8530 /* MIN (MAX (a, b), b) == b. */ 8531 if (TREE_CODE (op0) == compl_code 8532 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0)) 8533 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0)); 8534 8535 /* MIN (MAX (b, a), b) == b. */ 8536 if (TREE_CODE (op0) == compl_code 8537 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0) 8538 && reorder_operands_p (TREE_OPERAND (op0, 1), op1)) 8539 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1)); 8540 8541 /* MIN (a, MAX (a, b)) == a. */ 8542 if (TREE_CODE (op1) == compl_code 8543 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0) 8544 && reorder_operands_p (op0, TREE_OPERAND (op1, 1))) 8545 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1)); 8546 8547 /* MIN (a, MAX (b, a)) == a. */ 8548 if (TREE_CODE (op1) == compl_code 8549 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0) 8550 && reorder_operands_p (op0, TREE_OPERAND (op1, 0))) 8551 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0)); 8552 8553 return NULL_TREE; 8554} 8555 8556/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1 8557 by changing CODE to reduce the magnitude of constants involved in 8558 ARG0 of the comparison. 8559 Returns a canonicalized comparison tree if a simplification was 8560 possible, otherwise returns NULL_TREE. 8561 Set *STRICT_OVERFLOW_P to true if the canonicalization is only 8562 valid if signed overflow is undefined.
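   For example, a - 5 < b becomes a - 4 <= b (only valid when signed
   overflow is undefined, hence *STRICT_OVERFLOW_P is set), and the
   constant-first form 10 <= b becomes b > 9 after the final swap.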
*/ 8563 8564static tree 8565maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type, 8566 tree arg0, tree arg1, 8567 bool *strict_overflow_p) 8568{ 8569 enum tree_code code0 = TREE_CODE (arg0); 8570 tree t, cst0 = NULL_TREE; 8571 int sgn0; 8572 bool swap = false; 8573 8574 /* Match A +- CST code arg1 and CST code arg1. We can change the 8575 first form only if overflow is undefined. */ 8576 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 8577 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))) 8578 /* In principle pointers also have undefined overflow behavior, 8579 but that causes problems elsewhere. */ 8580 && !POINTER_TYPE_P (TREE_TYPE (arg0)) 8581 && (code0 == MINUS_EXPR 8582 || code0 == PLUS_EXPR) 8583 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 8584 || code0 == INTEGER_CST)) 8585 return NULL_TREE; 8586 8587 /* Identify the constant in arg0 and its sign. */ 8588 if (code0 == INTEGER_CST) 8589 cst0 = arg0; 8590 else 8591 cst0 = TREE_OPERAND (arg0, 1); 8592 sgn0 = tree_int_cst_sgn (cst0); 8593 8594 /* Overflowed constants and zero will cause problems. */ 8595 if (integer_zerop (cst0) 8596 || TREE_OVERFLOW (cst0)) 8597 return NULL_TREE; 8598 8599 /* See if we can reduce the magnitude of the constant in 8600 arg0 by changing the comparison code. */ 8601 if (code0 == INTEGER_CST) 8602 { 8603 /* CST <= arg1 -> CST-1 < arg1. */ 8604 if (code == LE_EXPR && sgn0 == 1) 8605 code = LT_EXPR; 8606 /* -CST < arg1 -> -CST-1 <= arg1. */ 8607 else if (code == LT_EXPR && sgn0 == -1) 8608 code = LE_EXPR; 8609 /* CST > arg1 -> CST-1 >= arg1. */ 8610 else if (code == GT_EXPR && sgn0 == 1) 8611 code = GE_EXPR; 8612 /* -CST >= arg1 -> -CST-1 > arg1. */ 8613 else if (code == GE_EXPR && sgn0 == -1) 8614 code = GT_EXPR; 8615 else 8616 return NULL_TREE; 8617 /* arg1 code' CST' might be more canonical. */ 8618 swap = true; 8619 } 8620 else 8621 { 8622 /* A - CST < arg1 -> A - CST-1 <= arg1. */ 8623 if (code == LT_EXPR 8624 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR)) 8625 code = LE_EXPR; 8626 /* A + CST > arg1 -> A + CST-1 >= arg1. */ 8627 else if (code == GT_EXPR 8628 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR)) 8629 code = GE_EXPR; 8630 /* A + CST <= arg1 -> A + CST-1 < arg1. */ 8631 else if (code == LE_EXPR 8632 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR)) 8633 code = LT_EXPR; 8634 /* A - CST >= arg1 -> A - CST-1 > arg1. */ 8635 else if (code == GE_EXPR 8636 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR)) 8637 code = GT_EXPR; 8638 else 8639 return NULL_TREE; 8640 *strict_overflow_p = true; 8641 } 8642 8643 /* Now build the constant reduced in magnitude. But not if that 8644 would produce one outside of its types range. */ 8645 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0)) 8646 && ((sgn0 == 1 8647 && TYPE_MIN_VALUE (TREE_TYPE (cst0)) 8648 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0)))) 8649 || (sgn0 == -1 8650 && TYPE_MAX_VALUE (TREE_TYPE (cst0)) 8651 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0)))))) 8652 /* We cannot swap the comparison here as that would cause us to 8653 endlessly recurse. */ 8654 return NULL_TREE; 8655 8656 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR, 8657 cst0, build_int_cst (TREE_TYPE (cst0), 1)); 8658 if (code0 != INTEGER_CST) 8659 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t); 8660 t = fold_convert (TREE_TYPE (arg1), t); 8661 8662 /* If swapping might yield to a more canonical form, do so. 
*/ 8663 if (swap) 8664 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t); 8665 else 8666 return fold_build2_loc (loc, code, type, t, arg1); 8667} 8668 8669/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined 8670 overflow further. Try to decrease the magnitude of constants involved 8671 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa 8672 and put sole constants at the second argument position. 8673 Returns the canonicalized tree if changed, otherwise NULL_TREE. */ 8674 8675static tree 8676maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type, 8677 tree arg0, tree arg1) 8678{ 8679 tree t; 8680 bool strict_overflow_p; 8681 const char * const warnmsg = G_("assuming signed overflow does not occur " 8682 "when reducing constant in comparison"); 8683 8684 /* Try canonicalization by simplifying arg0. */ 8685 strict_overflow_p = false; 8686 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1, 8687 &strict_overflow_p); 8688 if (t) 8689 { 8690 if (strict_overflow_p) 8691 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE); 8692 return t; 8693 } 8694 8695 /* Try canonicalization by simplifying arg1 using the swapped 8696 comparison. */ 8697 code = swap_tree_comparison (code); 8698 strict_overflow_p = false; 8699 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0, 8700 &strict_overflow_p); 8701 if (t && strict_overflow_p) 8702 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE); 8703 return t; 8704} 8705 8706/* Return whether BASE + OFFSET + BITPOS may wrap around the address 8707 space. This is used to avoid issuing overflow warnings for 8708 expressions like &p->x which can not wrap. */ 8709 8710static bool 8711pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos) 8712{ 8713 if (!POINTER_TYPE_P (TREE_TYPE (base))) 8714 return true; 8715 8716 if (bitpos < 0) 8717 return true; 8718 8719 wide_int wi_offset; 8720 int precision = TYPE_PRECISION (TREE_TYPE (base)); 8721 if (offset == NULL_TREE) 8722 wi_offset = wi::zero (precision); 8723 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset)) 8724 return true; 8725 else 8726 wi_offset = offset; 8727 8728 bool overflow; 8729 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision); 8730 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow); 8731 if (overflow) 8732 return true; 8733 8734 if (!wi::fits_uhwi_p (total)) 8735 return true; 8736 8737 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base))); 8738 if (size <= 0) 8739 return true; 8740 8741 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an 8742 array. */ 8743 if (TREE_CODE (base) == ADDR_EXPR) 8744 { 8745 HOST_WIDE_INT base_size; 8746 8747 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0))); 8748 if (base_size > 0 && size < base_size) 8749 size = base_size; 8750 } 8751 8752 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size; 8753} 8754 8755/* Subroutine of fold_binary. This routine performs all of the 8756 transformations that are common to the equality/inequality 8757 operators (EQ_EXPR and NE_EXPR) and the ordering operators 8758 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than 8759 fold_binary should call fold_binary. Fold a comparison with 8760 tree code CODE and type TYPE with operands OP0 and OP1. Return 8761 the folded comparison or NULL_TREE. 
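   For example, when signed overflow is undefined for x, x + 20 > 10 is
   rewritten to x > -10, and x - y == 0 becomes x == y.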
*/ 8762 8763static tree 8764fold_comparison (location_t loc, enum tree_code code, tree type, 8765 tree op0, tree op1) 8766{ 8767 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR); 8768 tree arg0, arg1, tem; 8769 8770 arg0 = op0; 8771 arg1 = op1; 8772 8773 STRIP_SIGN_NOPS (arg0); 8774 STRIP_SIGN_NOPS (arg1); 8775 8776 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */ 8777 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8778 && (equality_code 8779 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 8780 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))) 8781 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 8782 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) 8783 && TREE_CODE (arg1) == INTEGER_CST 8784 && !TREE_OVERFLOW (arg1)) 8785 { 8786 const enum tree_code 8787 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR; 8788 tree const1 = TREE_OPERAND (arg0, 1); 8789 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1); 8790 tree variable = TREE_OPERAND (arg0, 0); 8791 tree new_const = int_const_binop (reverse_op, const2, const1); 8792 8793 /* If the constant operation overflowed this can be 8794 simplified as a comparison against INT_MAX/INT_MIN. */ 8795 if (TREE_OVERFLOW (new_const) 8796 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))) 8797 { 8798 int const1_sgn = tree_int_cst_sgn (const1); 8799 enum tree_code code2 = code; 8800 8801 /* Get the sign of the constant on the lhs if the 8802 operation were VARIABLE + CONST1. */ 8803 if (TREE_CODE (arg0) == MINUS_EXPR) 8804 const1_sgn = -const1_sgn; 8805 8806 /* The sign of the constant determines if we overflowed 8807 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1). 8808 Canonicalize to the INT_MIN overflow by swapping the comparison 8809 if necessary. */ 8810 if (const1_sgn == -1) 8811 code2 = swap_tree_comparison (code); 8812 8813 /* We now can look at the canonicalized case 8814 VARIABLE + 1 CODE2 INT_MIN 8815 and decide on the result. */ 8816 switch (code2) 8817 { 8818 case EQ_EXPR: 8819 case LT_EXPR: 8820 case LE_EXPR: 8821 return 8822 omit_one_operand_loc (loc, type, boolean_false_node, variable); 8823 8824 case NE_EXPR: 8825 case GE_EXPR: 8826 case GT_EXPR: 8827 return 8828 omit_one_operand_loc (loc, type, boolean_true_node, variable); 8829 8830 default: 8831 gcc_unreachable (); 8832 } 8833 } 8834 else 8835 { 8836 if (!equality_code) 8837 fold_overflow_warning ("assuming signed overflow does not occur " 8838 "when changing X +- C1 cmp C2 to " 8839 "X cmp C2 -+ C1", 8840 WARN_STRICT_OVERFLOW_COMPARISON); 8841 return fold_build2_loc (loc, code, type, variable, new_const); 8842 } 8843 } 8844 8845 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */ 8846 if (TREE_CODE (arg0) == MINUS_EXPR 8847 && equality_code 8848 && integer_zerop (arg1)) 8849 { 8850 /* ??? The transformation is valid for the other operators if overflow 8851 is undefined for the type, but performing it here badly interacts 8852 with the transformation in fold_cond_expr_with_comparison which 8853 attempts to synthetize ABS_EXPR. */ 8854 if (!equality_code) 8855 fold_overflow_warning ("assuming signed overflow does not occur " 8856 "when changing X - Y cmp 0 to X cmp Y", 8857 WARN_STRICT_OVERFLOW_COMPARISON); 8858 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 8859 TREE_OPERAND (arg0, 1)); 8860 } 8861 8862 /* For comparisons of pointers we can decompose it to a compile time 8863 comparison of the base objects and the offsets into the object. 
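   For instance, &s.a != &s.b for two distinct fields of the same object s
   can be decided purely from the two field offsets once the common base is
   recognized.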
8864 This requires at least one operand being an ADDR_EXPR or a 8865 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */ 8866 if (POINTER_TYPE_P (TREE_TYPE (arg0)) 8867 && (TREE_CODE (arg0) == ADDR_EXPR 8868 || TREE_CODE (arg1) == ADDR_EXPR 8869 || TREE_CODE (arg0) == POINTER_PLUS_EXPR 8870 || TREE_CODE (arg1) == POINTER_PLUS_EXPR)) 8871 { 8872 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE; 8873 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0; 8874 machine_mode mode; 8875 int volatilep, unsignedp; 8876 bool indirect_base0 = false, indirect_base1 = false; 8877 8878 /* Get base and offset for the access. Strip ADDR_EXPR for 8879 get_inner_reference, but put it back by stripping INDIRECT_REF 8880 off the base object if possible. indirect_baseN will be true 8881 if baseN is not an address but refers to the object itself. */ 8882 base0 = arg0; 8883 if (TREE_CODE (arg0) == ADDR_EXPR) 8884 { 8885 base0 = get_inner_reference (TREE_OPERAND (arg0, 0), 8886 &bitsize, &bitpos0, &offset0, &mode, 8887 &unsignedp, &volatilep, false); 8888 if (TREE_CODE (base0) == INDIRECT_REF) 8889 base0 = TREE_OPERAND (base0, 0); 8890 else 8891 indirect_base0 = true; 8892 } 8893 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) 8894 { 8895 base0 = TREE_OPERAND (arg0, 0); 8896 STRIP_SIGN_NOPS (base0); 8897 if (TREE_CODE (base0) == ADDR_EXPR) 8898 { 8899 base0 8900 = get_inner_reference (TREE_OPERAND (base0, 0), 8901 &bitsize, &bitpos0, &offset0, &mode, 8902 &unsignedp, &volatilep, false); 8903 if (TREE_CODE (base0) == INDIRECT_REF) 8904 base0 = TREE_OPERAND (base0, 0); 8905 else 8906 indirect_base0 = true; 8907 } 8908 if (offset0 == NULL_TREE || integer_zerop (offset0)) 8909 offset0 = TREE_OPERAND (arg0, 1); 8910 else 8911 offset0 = size_binop (PLUS_EXPR, offset0, 8912 TREE_OPERAND (arg0, 1)); 8913 if (TREE_CODE (offset0) == INTEGER_CST) 8914 { 8915 offset_int tem = wi::sext (wi::to_offset (offset0), 8916 TYPE_PRECISION (sizetype)); 8917 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT); 8918 tem += bitpos0; 8919 if (wi::fits_shwi_p (tem)) 8920 { 8921 bitpos0 = tem.to_shwi (); 8922 offset0 = NULL_TREE; 8923 } 8924 } 8925 } 8926 8927 base1 = arg1; 8928 if (TREE_CODE (arg1) == ADDR_EXPR) 8929 { 8930 base1 = get_inner_reference (TREE_OPERAND (arg1, 0), 8931 &bitsize, &bitpos1, &offset1, &mode, 8932 &unsignedp, &volatilep, false); 8933 if (TREE_CODE (base1) == INDIRECT_REF) 8934 base1 = TREE_OPERAND (base1, 0); 8935 else 8936 indirect_base1 = true; 8937 } 8938 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR) 8939 { 8940 base1 = TREE_OPERAND (arg1, 0); 8941 STRIP_SIGN_NOPS (base1); 8942 if (TREE_CODE (base1) == ADDR_EXPR) 8943 { 8944 base1 8945 = get_inner_reference (TREE_OPERAND (base1, 0), 8946 &bitsize, &bitpos1, &offset1, &mode, 8947 &unsignedp, &volatilep, false); 8948 if (TREE_CODE (base1) == INDIRECT_REF) 8949 base1 = TREE_OPERAND (base1, 0); 8950 else 8951 indirect_base1 = true; 8952 } 8953 if (offset1 == NULL_TREE || integer_zerop (offset1)) 8954 offset1 = TREE_OPERAND (arg1, 1); 8955 else 8956 offset1 = size_binop (PLUS_EXPR, offset1, 8957 TREE_OPERAND (arg1, 1)); 8958 if (TREE_CODE (offset1) == INTEGER_CST) 8959 { 8960 offset_int tem = wi::sext (wi::to_offset (offset1), 8961 TYPE_PRECISION (sizetype)); 8962 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT); 8963 tem += bitpos1; 8964 if (wi::fits_shwi_p (tem)) 8965 { 8966 bitpos1 = tem.to_shwi (); 8967 offset1 = NULL_TREE; 8968 } 8969 } 8970 } 8971 8972 /* A local variable can never be pointed to by 8973 the default SSA name of an incoming parameter. 
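   For example, with a parameter int *p (still its incoming default
   definition) and a local int x, p == &x folds to false and p != &x folds
   to true.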
*/ 8974 if ((TREE_CODE (arg0) == ADDR_EXPR 8975 && indirect_base0 8976 && TREE_CODE (base0) == VAR_DECL 8977 && auto_var_in_fn_p (base0, current_function_decl) 8978 && !indirect_base1 8979 && TREE_CODE (base1) == SSA_NAME 8980 && SSA_NAME_IS_DEFAULT_DEF (base1) 8981 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL) 8982 || (TREE_CODE (arg1) == ADDR_EXPR 8983 && indirect_base1 8984 && TREE_CODE (base1) == VAR_DECL 8985 && auto_var_in_fn_p (base1, current_function_decl) 8986 && !indirect_base0 8987 && TREE_CODE (base0) == SSA_NAME 8988 && SSA_NAME_IS_DEFAULT_DEF (base0) 8989 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL)) 8990 { 8991 if (code == NE_EXPR) 8992 return constant_boolean_node (1, type); 8993 else if (code == EQ_EXPR) 8994 return constant_boolean_node (0, type); 8995 } 8996 /* If we have equivalent bases we might be able to simplify. */ 8997 else if (indirect_base0 == indirect_base1 8998 && operand_equal_p (base0, base1, 0)) 8999 { 9000 /* We can fold this expression to a constant if the non-constant 9001 offset parts are equal. */ 9002 if ((offset0 == offset1 9003 || (offset0 && offset1 9004 && operand_equal_p (offset0, offset1, 0))) 9005 && (code == EQ_EXPR 9006 || code == NE_EXPR 9007 || (indirect_base0 && DECL_P (base0)) 9008 || POINTER_TYPE_OVERFLOW_UNDEFINED)) 9009 9010 { 9011 if (!equality_code 9012 && bitpos0 != bitpos1 9013 && (pointer_may_wrap_p (base0, offset0, bitpos0) 9014 || pointer_may_wrap_p (base1, offset1, bitpos1))) 9015 fold_overflow_warning (("assuming pointer wraparound does not " 9016 "occur when comparing P +- C1 with " 9017 "P +- C2"), 9018 WARN_STRICT_OVERFLOW_CONDITIONAL); 9019 9020 switch (code) 9021 { 9022 case EQ_EXPR: 9023 return constant_boolean_node (bitpos0 == bitpos1, type); 9024 case NE_EXPR: 9025 return constant_boolean_node (bitpos0 != bitpos1, type); 9026 case LT_EXPR: 9027 return constant_boolean_node (bitpos0 < bitpos1, type); 9028 case LE_EXPR: 9029 return constant_boolean_node (bitpos0 <= bitpos1, type); 9030 case GE_EXPR: 9031 return constant_boolean_node (bitpos0 >= bitpos1, type); 9032 case GT_EXPR: 9033 return constant_boolean_node (bitpos0 > bitpos1, type); 9034 default:; 9035 } 9036 } 9037 /* We can simplify the comparison to a comparison of the variable 9038 offset parts if the constant offset parts are equal. 9039 Be careful to use signed sizetype here because otherwise we 9040 mess with array offsets in the wrong way. This is possible 9041 because pointer arithmetic is restricted to retain within an 9042 object and overflow on pointer differences is undefined as of 9043 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */ 9044 else if (bitpos0 == bitpos1 9045 && (equality_code 9046 || (indirect_base0 && DECL_P (base0)) 9047 || POINTER_TYPE_OVERFLOW_UNDEFINED)) 9048 { 9049 /* By converting to signed sizetype we cover middle-end pointer 9050 arithmetic which operates on unsigned pointer types of size 9051 type size and ARRAY_REF offsets which are properly sign or 9052 zero extended from their type in case it is narrower than 9053 sizetype. 
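                 For example, comparing &a[i] with &a[j] then reduces to a
                 comparison of the two variable byte offsets converted to
                 ssizetype.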
 */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (!equality_code
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses of
         declarations with different addresses.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && DECL_P (base0)
               && DECL_P (base1)
               /* Watch for aliases.  */
               && (!decl_in_symtab_p (base0)
                   || !decl_in_symtab_p (base1)
                   || !symtab_node::get_create (base0)->equal_address_to
                         (symtab_node::get_create (base1))))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value and of the same sign as before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ?
MINUS_EXPR : PLUS_EXPR, 9143 const2, const1); 9144 if (!TREE_OVERFLOW (cst) 9145 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2) 9146 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2)) 9147 { 9148 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 9149 return fold_build2_loc (loc, code, type, 9150 variable1, 9151 fold_build2_loc (loc, TREE_CODE (arg1), 9152 TREE_TYPE (arg1), 9153 variable2, cst)); 9154 } 9155 9156 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1) 9157 ? MINUS_EXPR : PLUS_EXPR, 9158 const1, const2); 9159 if (!TREE_OVERFLOW (cst) 9160 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1) 9161 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1)) 9162 { 9163 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 9164 return fold_build2_loc (loc, code, type, 9165 fold_build2_loc (loc, TREE_CODE (arg0), 9166 TREE_TYPE (arg0), 9167 variable1, cst), 9168 variable2); 9169 } 9170 } 9171 9172 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the 9173 signed arithmetic case. That form is created by the compiler 9174 often enough for folding it to be of value. One example is in 9175 computing loop trip counts after Operator Strength Reduction. */ 9176 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 9177 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)) 9178 && TREE_CODE (arg0) == MULT_EXPR 9179 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 9180 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))) 9181 && integer_zerop (arg1)) 9182 { 9183 tree const1 = TREE_OPERAND (arg0, 1); 9184 tree const2 = arg1; /* zero */ 9185 tree variable1 = TREE_OPERAND (arg0, 0); 9186 enum tree_code cmp_code = code; 9187 9188 /* Handle unfolded multiplication by zero. */ 9189 if (integer_zerop (const1)) 9190 return fold_build2_loc (loc, cmp_code, type, const1, const2); 9191 9192 fold_overflow_warning (("assuming signed overflow does not occur when " 9193 "eliminating multiplication in comparison " 9194 "with zero"), 9195 WARN_STRICT_OVERFLOW_COMPARISON); 9196 9197 /* If const1 is negative we swap the sense of the comparison. */ 9198 if (tree_int_cst_sgn (const1) < 0) 9199 cmp_code = swap_tree_comparison (cmp_code); 9200 9201 return fold_build2_loc (loc, cmp_code, type, variable1, const2); 9202 } 9203 9204 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1); 9205 if (tem) 9206 return tem; 9207 9208 if (FLOAT_TYPE_P (TREE_TYPE (arg0))) 9209 { 9210 tree targ0 = strip_float_extensions (arg0); 9211 tree targ1 = strip_float_extensions (arg1); 9212 tree newtype = TREE_TYPE (targ0); 9213 9214 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype)) 9215 newtype = TREE_TYPE (targ1); 9216 9217 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. 
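         Dropping the widening conversions is safe on the assumption that
         extending to the wider format is exact, so the comparison result
         cannot change.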
*/ 9218 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) 9219 return fold_build2_loc (loc, code, type, 9220 fold_convert_loc (loc, newtype, targ0), 9221 fold_convert_loc (loc, newtype, targ1)); 9222 9223 /* (-a) CMP (-b) -> b CMP a */ 9224 if (TREE_CODE (arg0) == NEGATE_EXPR 9225 && TREE_CODE (arg1) == NEGATE_EXPR) 9226 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0), 9227 TREE_OPERAND (arg0, 0)); 9228 9229 if (TREE_CODE (arg1) == REAL_CST) 9230 { 9231 REAL_VALUE_TYPE cst; 9232 cst = TREE_REAL_CST (arg1); 9233 9234 /* (-a) CMP CST -> a swap(CMP) (-CST) */ 9235 if (TREE_CODE (arg0) == NEGATE_EXPR) 9236 return fold_build2_loc (loc, swap_tree_comparison (code), type, 9237 TREE_OPERAND (arg0, 0), 9238 build_real (TREE_TYPE (arg1), 9239 real_value_negate (&cst))); 9240 9241 /* IEEE doesn't distinguish +0 and -0 in comparisons. */ 9242 /* a CMP (-0) -> a CMP 0 */ 9243 if (REAL_VALUE_MINUS_ZERO (cst)) 9244 return fold_build2_loc (loc, code, type, arg0, 9245 build_real (TREE_TYPE (arg1), dconst0)); 9246 9247 /* x != NaN is always true, other ops are always false. */ 9248 if (REAL_VALUE_ISNAN (cst) 9249 && ! HONOR_SNANS (arg1)) 9250 { 9251 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node; 9252 return omit_one_operand_loc (loc, type, tem, arg0); 9253 } 9254 9255 /* Fold comparisons against infinity. */ 9256 if (REAL_VALUE_ISINF (cst) 9257 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))) 9258 { 9259 tem = fold_inf_compare (loc, code, type, arg0, arg1); 9260 if (tem != NULL_TREE) 9261 return tem; 9262 } 9263 } 9264 9265 /* If this is a comparison of a real constant with a PLUS_EXPR 9266 or a MINUS_EXPR of a real constant, we can convert it into a 9267 comparison with a revised real constant as long as no overflow 9268 occurs when unsafe_math_optimizations are enabled. */ 9269 if (flag_unsafe_math_optimizations 9270 && TREE_CODE (arg1) == REAL_CST 9271 && (TREE_CODE (arg0) == PLUS_EXPR 9272 || TREE_CODE (arg0) == MINUS_EXPR) 9273 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 9274 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR 9275 ? MINUS_EXPR : PLUS_EXPR, 9276 arg1, TREE_OPERAND (arg0, 1))) 9277 && !TREE_OVERFLOW (tem)) 9278 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 9279 9280 /* Likewise, we can simplify a comparison of a real constant with 9281 a MINUS_EXPR whose first operand is also a real constant, i.e. 9282 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on 9283 floating-point types only if -fassociative-math is set. */ 9284 if (flag_associative_math 9285 && TREE_CODE (arg1) == REAL_CST 9286 && TREE_CODE (arg0) == MINUS_EXPR 9287 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST 9288 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0), 9289 arg1)) 9290 && !TREE_OVERFLOW (tem)) 9291 return fold_build2_loc (loc, swap_tree_comparison (code), type, 9292 TREE_OPERAND (arg0, 1), tem); 9293 9294 /* Fold comparisons against built-in math functions. */ 9295 if (TREE_CODE (arg1) == REAL_CST 9296 && flag_unsafe_math_optimizations 9297 && ! 
flag_errno_math) 9298 { 9299 enum built_in_function fcode = builtin_mathfn_code (arg0); 9300 9301 if (fcode != END_BUILTINS) 9302 { 9303 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1); 9304 if (tem != NULL_TREE) 9305 return tem; 9306 } 9307 } 9308 } 9309 9310 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE 9311 && CONVERT_EXPR_P (arg0)) 9312 { 9313 /* If we are widening one operand of an integer comparison, 9314 see if the other operand is similarly being widened. Perhaps we 9315 can do the comparison in the narrower type. */ 9316 tem = fold_widened_comparison (loc, code, type, arg0, arg1); 9317 if (tem) 9318 return tem; 9319 9320 /* Or if we are changing signedness. */ 9321 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1); 9322 if (tem) 9323 return tem; 9324 } 9325 9326 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a 9327 constant, we can simplify it. */ 9328 if (TREE_CODE (arg1) == INTEGER_CST 9329 && (TREE_CODE (arg0) == MIN_EXPR 9330 || TREE_CODE (arg0) == MAX_EXPR) 9331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 9332 { 9333 tem = optimize_minmax_comparison (loc, code, type, op0, op1); 9334 if (tem) 9335 return tem; 9336 } 9337 9338 /* Simplify comparison of something with itself. (For IEEE 9339 floating-point, we can only do some of these simplifications.) */ 9340 if (operand_equal_p (arg0, arg1, 0)) 9341 { 9342 switch (code) 9343 { 9344 case EQ_EXPR: 9345 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)) 9346 || ! HONOR_NANS (arg0)) 9347 return constant_boolean_node (1, type); 9348 break; 9349 9350 case GE_EXPR: 9351 case LE_EXPR: 9352 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)) 9353 || ! HONOR_NANS (arg0)) 9354 return constant_boolean_node (1, type); 9355 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1); 9356 9357 case NE_EXPR: 9358 /* For NE, we can only do this simplification if integer 9359 or we don't honor IEEE floating point NaNs. */ 9360 if (FLOAT_TYPE_P (TREE_TYPE (arg0)) 9361 && HONOR_NANS (arg0)) 9362 break; 9363 /* ... fall through ... */ 9364 case GT_EXPR: 9365 case LT_EXPR: 9366 return constant_boolean_node (0, type); 9367 default: 9368 gcc_unreachable (); 9369 } 9370 } 9371 9372 /* If we are comparing an expression that just has comparisons 9373 of two integer values, arithmetic expressions of those comparisons, 9374 and constants, we can simplify it. There are only three cases 9375 to check: the two values can either be equal, the first can be 9376 greater, or the second can be greater. Fold the expression for 9377 those three values. Since each value must be 0 or 1, we have 9378 eight possibilities, each of which corresponds to the constant 0 9379 or 1 or one of the six possible comparisons. 9380 9381 This handles common cases like (a > b) == 0 but also handles 9382 expressions like ((x > y) - (y > x)) > 0, which supposedly 9383 occur in macroized code. */ 9384 9385 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST) 9386 { 9387 tree cval1 = 0, cval2 = 0; 9388 int save_p = 0; 9389 9390 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p) 9391 /* Don't handle degenerate cases here; they should already 9392 have been handled anyway. */ 9393 && cval1 != 0 && cval2 != 0 9394 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2)) 9395 && TREE_TYPE (cval1) == TREE_TYPE (cval2) 9396 && INTEGRAL_TYPE_P (TREE_TYPE (cval1)) 9397 && TYPE_MAX_VALUE (TREE_TYPE (cval1)) 9398 && TYPE_MAX_VALUE (TREE_TYPE (cval2)) 9399 && ! 
operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)), 9400 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0)) 9401 { 9402 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1)); 9403 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1)); 9404 9405 /* We can't just pass T to eval_subst in case cval1 or cval2 9406 was the same as ARG1. */ 9407 9408 tree high_result 9409 = fold_build2_loc (loc, code, type, 9410 eval_subst (loc, arg0, cval1, maxval, 9411 cval2, minval), 9412 arg1); 9413 tree equal_result 9414 = fold_build2_loc (loc, code, type, 9415 eval_subst (loc, arg0, cval1, maxval, 9416 cval2, maxval), 9417 arg1); 9418 tree low_result 9419 = fold_build2_loc (loc, code, type, 9420 eval_subst (loc, arg0, cval1, minval, 9421 cval2, maxval), 9422 arg1); 9423 9424 /* All three of these results should be 0 or 1. Confirm they are. 9425 Then use those values to select the proper code to use. */ 9426 9427 if (TREE_CODE (high_result) == INTEGER_CST 9428 && TREE_CODE (equal_result) == INTEGER_CST 9429 && TREE_CODE (low_result) == INTEGER_CST) 9430 { 9431 /* Make a 3-bit mask with the high-order bit being the 9432 value for `>', the next for '=', and the low for '<'. */ 9433 switch ((integer_onep (high_result) * 4) 9434 + (integer_onep (equal_result) * 2) 9435 + integer_onep (low_result)) 9436 { 9437 case 0: 9438 /* Always false. */ 9439 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 9440 case 1: 9441 code = LT_EXPR; 9442 break; 9443 case 2: 9444 code = EQ_EXPR; 9445 break; 9446 case 3: 9447 code = LE_EXPR; 9448 break; 9449 case 4: 9450 code = GT_EXPR; 9451 break; 9452 case 5: 9453 code = NE_EXPR; 9454 break; 9455 case 6: 9456 code = GE_EXPR; 9457 break; 9458 case 7: 9459 /* Always true. */ 9460 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 9461 } 9462 9463 if (save_p) 9464 { 9465 tem = save_expr (build2 (code, type, cval1, cval2)); 9466 SET_EXPR_LOCATION (tem, loc); 9467 return tem; 9468 } 9469 return fold_build2_loc (loc, code, type, cval1, cval2); 9470 } 9471 } 9472 } 9473 9474 /* We can fold X/C1 op C2 where C1 and C2 are integer constants 9475 into a single range test. */ 9476 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR 9477 || TREE_CODE (arg0) == EXACT_DIV_EXPR) 9478 && TREE_CODE (arg1) == INTEGER_CST 9479 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 9480 && !integer_zerop (TREE_OPERAND (arg0, 1)) 9481 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) 9482 && !TREE_OVERFLOW (arg1)) 9483 { 9484 tem = fold_div_compare (loc, code, type, arg0, arg1); 9485 if (tem != NULL_TREE) 9486 return tem; 9487 } 9488 9489 /* Fold ~X op ~Y as Y op X. */ 9490 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9491 && TREE_CODE (arg1) == BIT_NOT_EXPR) 9492 { 9493 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); 9494 return fold_build2_loc (loc, code, type, 9495 fold_convert_loc (loc, cmp_type, 9496 TREE_OPERAND (arg1, 0)), 9497 TREE_OPERAND (arg0, 0)); 9498 } 9499 9500 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */ 9501 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9502 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST)) 9503 { 9504 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); 9505 return fold_build2_loc (loc, swap_tree_comparison (code), type, 9506 TREE_OPERAND (arg0, 0), 9507 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type, 9508 fold_convert_loc (loc, cmp_type, arg1))); 9509 } 9510 9511 return NULL_TREE; 9512} 9513 9514 9515/* Subroutine of fold_binary. Optimize complex multiplications of the 9516 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). 
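   That is, (a + bi) * (a - bi) becomes a*a + b*b with a zero imaginary
   part.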
The 9517 argument EXPR represents the expression "z" of type TYPE. */ 9518 9519static tree 9520fold_mult_zconjz (location_t loc, tree type, tree expr) 9521{ 9522 tree itype = TREE_TYPE (type); 9523 tree rpart, ipart, tem; 9524 9525 if (TREE_CODE (expr) == COMPLEX_EXPR) 9526 { 9527 rpart = TREE_OPERAND (expr, 0); 9528 ipart = TREE_OPERAND (expr, 1); 9529 } 9530 else if (TREE_CODE (expr) == COMPLEX_CST) 9531 { 9532 rpart = TREE_REALPART (expr); 9533 ipart = TREE_IMAGPART (expr); 9534 } 9535 else 9536 { 9537 expr = save_expr (expr); 9538 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr); 9539 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr); 9540 } 9541 9542 rpart = save_expr (rpart); 9543 ipart = save_expr (ipart); 9544 tem = fold_build2_loc (loc, PLUS_EXPR, itype, 9545 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart), 9546 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart)); 9547 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem, 9548 build_zero_cst (itype)); 9549} 9550 9551 9552/* Subroutine of fold_binary. If P is the value of EXPR, computes 9553 power-of-two M and (arbitrary) N such that M divides (P-N). This condition 9554 guarantees that P and N have the same least significant log2(M) bits. 9555 N is not otherwise constrained. In particular, N is not normalized to 9556 0 <= N < M as is common. In general, the precise value of P is unknown. 9557 M is chosen as large as possible such that constant N can be determined. 9558 9559 Returns M and sets *RESIDUE to N. 9560 9561 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into 9562 account. This is not always possible due to PR 35705. 9563 */ 9564 9565static unsigned HOST_WIDE_INT 9566get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue, 9567 bool allow_func_align) 9568{ 9569 enum tree_code code; 9570 9571 *residue = 0; 9572 9573 code = TREE_CODE (expr); 9574 if (code == ADDR_EXPR) 9575 { 9576 unsigned int bitalign; 9577 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue); 9578 *residue /= BITS_PER_UNIT; 9579 return bitalign / BITS_PER_UNIT; 9580 } 9581 else if (code == POINTER_PLUS_EXPR) 9582 { 9583 tree op0, op1; 9584 unsigned HOST_WIDE_INT modulus; 9585 enum tree_code inner_code; 9586 9587 op0 = TREE_OPERAND (expr, 0); 9588 STRIP_NOPS (op0); 9589 modulus = get_pointer_modulus_and_residue (op0, residue, 9590 allow_func_align); 9591 9592 op1 = TREE_OPERAND (expr, 1); 9593 STRIP_NOPS (op1); 9594 inner_code = TREE_CODE (op1); 9595 if (inner_code == INTEGER_CST) 9596 { 9597 *residue += TREE_INT_CST_LOW (op1); 9598 return modulus; 9599 } 9600 else if (inner_code == MULT_EXPR) 9601 { 9602 op1 = TREE_OPERAND (op1, 1); 9603 if (TREE_CODE (op1) == INTEGER_CST) 9604 { 9605 unsigned HOST_WIDE_INT align; 9606 9607 /* Compute the greatest power-of-2 divisor of op1. */ 9608 align = TREE_INT_CST_LOW (op1); 9609 align &= -align; 9610 9611 /* If align is non-zero and less than *modulus, replace 9612 *modulus with align., If align is 0, then either op1 is 0 9613 or the greatest power-of-2 divisor of op1 doesn't fit in an 9614 unsigned HOST_WIDE_INT. In either case, no additional 9615 constraint is imposed. */ 9616 if (align) 9617 modulus = MIN (modulus, align); 9618 9619 return modulus; 9620 } 9621 } 9622 } 9623 9624 /* If we get here, we were unable to determine anything useful about the 9625 expression. */ 9626 return 1; 9627} 9628 9629/* Helper function for fold_vec_perm. Store elements of VECTOR_CST or 9630 CONSTRUCTOR ARG into array ELTS and return true if successful. 
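   Trailing elements not present in a CONSTRUCTOR are filled with zero.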
 */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
        elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
        if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
          return false;
        else
          elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}

/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
              && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
        need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}

/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.
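     The result is then (i0 - i1) * element_size plus whatever offset the
     recursion computed for the two bases.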
*/ 9716 if ((TREE_CODE (base0) == ARRAY_REF 9717 && TREE_CODE (base1) == ARRAY_REF 9718 && (base_offset 9719 = fold_addr_of_array_ref_difference (loc, type, base0, base1))) 9720 || (INDIRECT_REF_P (base0) 9721 && INDIRECT_REF_P (base1) 9722 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type, 9723 TREE_OPERAND (base0, 0), 9724 TREE_OPERAND (base1, 0)))) 9725 || operand_equal_p (base0, base1, 0)) 9726 { 9727 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1)); 9728 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1)); 9729 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0)); 9730 tree diff = build2 (MINUS_EXPR, type, op0, op1); 9731 return fold_build2_loc (loc, PLUS_EXPR, type, 9732 base_offset, 9733 fold_build2_loc (loc, MULT_EXPR, type, 9734 diff, esz)); 9735 } 9736 return NULL_TREE; 9737} 9738 9739/* If the real or vector real constant CST of type TYPE has an exact 9740 inverse, return it, else return NULL. */ 9741 9742tree 9743exact_inverse (tree type, tree cst) 9744{ 9745 REAL_VALUE_TYPE r; 9746 tree unit_type, *elts; 9747 machine_mode mode; 9748 unsigned vec_nelts, i; 9749 9750 switch (TREE_CODE (cst)) 9751 { 9752 case REAL_CST: 9753 r = TREE_REAL_CST (cst); 9754 9755 if (exact_real_inverse (TYPE_MODE (type), &r)) 9756 return build_real (type, r); 9757 9758 return NULL_TREE; 9759 9760 case VECTOR_CST: 9761 vec_nelts = VECTOR_CST_NELTS (cst); 9762 elts = XALLOCAVEC (tree, vec_nelts); 9763 unit_type = TREE_TYPE (type); 9764 mode = TYPE_MODE (unit_type); 9765 9766 for (i = 0; i < vec_nelts; i++) 9767 { 9768 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i)); 9769 if (!exact_real_inverse (mode, &r)) 9770 return NULL_TREE; 9771 elts[i] = build_real (unit_type, r); 9772 } 9773 9774 return build_vector (type, elts); 9775 9776 default: 9777 return NULL_TREE; 9778 } 9779} 9780 9781/* Mask out the tz least significant bits of X of type TYPE where 9782 tz is the number of trailing zeroes in Y. */ 9783static wide_int 9784mask_with_tz (tree type, const wide_int &x, const wide_int &y) 9785{ 9786 int tz = wi::ctz (y); 9787 if (tz > 0) 9788 return wi::mask (tz, true, TYPE_PRECISION (type)) & x; 9789 return x; 9790} 9791 9792/* Return true when T is an address and is known to be nonzero. 9793 For floating point we further ensure that T is not denormal. 9794 Similar logic is present in nonzero_address in rtlanal.h. 9795 9796 If the return value is based on the assumption that signed overflow 9797 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 9798 change *STRICT_OVERFLOW_P. */ 9799 9800static bool 9801tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p) 9802{ 9803 tree type = TREE_TYPE (t); 9804 enum tree_code code; 9805 9806 /* Doing something useful for floating point would need more work. 
*/ 9807 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) 9808 return false; 9809 9810 code = TREE_CODE (t); 9811 switch (TREE_CODE_CLASS (code)) 9812 { 9813 case tcc_unary: 9814 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0), 9815 strict_overflow_p); 9816 case tcc_binary: 9817 case tcc_comparison: 9818 return tree_binary_nonzero_warnv_p (code, type, 9819 TREE_OPERAND (t, 0), 9820 TREE_OPERAND (t, 1), 9821 strict_overflow_p); 9822 case tcc_constant: 9823 case tcc_declaration: 9824 case tcc_reference: 9825 return tree_single_nonzero_warnv_p (t, strict_overflow_p); 9826 9827 default: 9828 break; 9829 } 9830 9831 switch (code) 9832 { 9833 case TRUTH_NOT_EXPR: 9834 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0), 9835 strict_overflow_p); 9836 9837 case TRUTH_AND_EXPR: 9838 case TRUTH_OR_EXPR: 9839 case TRUTH_XOR_EXPR: 9840 return tree_binary_nonzero_warnv_p (code, type, 9841 TREE_OPERAND (t, 0), 9842 TREE_OPERAND (t, 1), 9843 strict_overflow_p); 9844 9845 case COND_EXPR: 9846 case CONSTRUCTOR: 9847 case OBJ_TYPE_REF: 9848 case ASSERT_EXPR: 9849 case ADDR_EXPR: 9850 case WITH_SIZE_EXPR: 9851 case SSA_NAME: 9852 return tree_single_nonzero_warnv_p (t, strict_overflow_p); 9853 9854 case COMPOUND_EXPR: 9855 case MODIFY_EXPR: 9856 case BIND_EXPR: 9857 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 9858 strict_overflow_p); 9859 9860 case SAVE_EXPR: 9861 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 9862 strict_overflow_p); 9863 9864 case CALL_EXPR: 9865 { 9866 tree fndecl = get_callee_fndecl (t); 9867 if (!fndecl) return false; 9868 if (flag_delete_null_pointer_checks && !flag_check_new 9869 && DECL_IS_OPERATOR_NEW (fndecl) 9870 && !TREE_NOTHROW (fndecl)) 9871 return true; 9872 if (flag_delete_null_pointer_checks 9873 && lookup_attribute ("returns_nonnull", 9874 TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))) 9875 return true; 9876 return alloca_call_p (t); 9877 } 9878 9879 default: 9880 break; 9881 } 9882 return false; 9883} 9884 9885/* Return true when T is an address and is known to be nonzero. 9886 Handle warnings about undefined signed overflow. */ 9887 9888static bool 9889tree_expr_nonzero_p (tree t) 9890{ 9891 bool ret, strict_overflow_p; 9892 9893 strict_overflow_p = false; 9894 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p); 9895 if (strict_overflow_p) 9896 fold_overflow_warning (("assuming signed overflow does not occur when " 9897 "determining that expression is always " 9898 "non-zero"), 9899 WARN_STRICT_OVERFLOW_MISC); 9900 return ret; 9901} 9902 9903/* Fold a binary expression of code CODE and type TYPE with operands 9904 OP0 and OP1. LOC is the location of the resulting expression. 9905 Return the folded expression if folding is successful. Otherwise, 9906 return NULL_TREE. */ 9907 9908tree 9909fold_binary_loc (location_t loc, 9910 enum tree_code code, tree type, tree op0, tree op1) 9911{ 9912 enum tree_code_class kind = TREE_CODE_CLASS (code); 9913 tree arg0, arg1, tem; 9914 tree t1 = NULL_TREE; 9915 bool strict_overflow_p; 9916 unsigned int prec; 9917 9918 gcc_assert (IS_EXPR_CODE_CLASS (kind) 9919 && TREE_CODE_LENGTH (code) == 2 9920 && op0 != NULL_TREE 9921 && op1 != NULL_TREE); 9922 9923 arg0 = op0; 9924 arg1 = op1; 9925 9926 /* Strip any conversions that don't change the mode. This is 9927 safe for every expression, except for a comparison expression 9928 because its signedness is derived from its operands. So, in 9929 the latter case, only strip conversions that don't change the 9930 signedness. 
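     For example, in (unsigned) a < (unsigned) b the conversions must stay,
     or the operands would be compared as signed values instead.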
MIN_EXPR/MAX_EXPR also need signedness of arguments 9931 preserved. 9932 9933 Note that this is done as an internal manipulation within the 9934 constant folder, in order to find the simplest representation 9935 of the arguments so that their form can be studied. In any 9936 cases, the appropriate type conversions should be put back in 9937 the tree that will get out of the constant folder. */ 9938 9939 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR) 9940 { 9941 STRIP_SIGN_NOPS (arg0); 9942 STRIP_SIGN_NOPS (arg1); 9943 } 9944 else 9945 { 9946 STRIP_NOPS (arg0); 9947 STRIP_NOPS (arg1); 9948 } 9949 9950 /* Note that TREE_CONSTANT isn't enough: static var addresses are 9951 constant but we can't do arithmetic on them. */ 9952 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1)) 9953 { 9954 tem = const_binop (code, type, arg0, arg1); 9955 if (tem != NULL_TREE) 9956 { 9957 if (TREE_TYPE (tem) != type) 9958 tem = fold_convert_loc (loc, type, tem); 9959 return tem; 9960 } 9961 } 9962 9963 /* If this is a commutative operation, and ARG0 is a constant, move it 9964 to ARG1 to reduce the number of tests below. */ 9965 if (commutative_tree_code (code) 9966 && tree_swap_operands_p (arg0, arg1, true)) 9967 return fold_build2_loc (loc, code, type, op1, op0); 9968 9969 /* Likewise if this is a comparison, and ARG0 is a constant, move it 9970 to ARG1 to reduce the number of tests below. */ 9971 if (kind == tcc_comparison 9972 && tree_swap_operands_p (arg0, arg1, true)) 9973 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0); 9974 9975 tem = generic_simplify (loc, code, type, op0, op1); 9976 if (tem) 9977 return tem; 9978 9979 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand. 9980 9981 First check for cases where an arithmetic operation is applied to a 9982 compound, conditional, or comparison operation. Push the arithmetic 9983 operation inside the compound or conditional to see if any folding 9984 can then be done. Convert comparison to conditional for this purpose. 9985 The also optimizes non-constant cases that used to be done in 9986 expand_expr. 9987 9988 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR, 9989 one of the operands is a comparison and the other is a comparison, a 9990 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the 9991 code below would make the expression more complex. Change it to a 9992 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to 9993 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */ 9994 9995 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR 9996 || code == EQ_EXPR || code == NE_EXPR) 9997 && TREE_CODE (type) != VECTOR_TYPE 9998 && ((truth_value_p (TREE_CODE (arg0)) 9999 && (truth_value_p (TREE_CODE (arg1)) 10000 || (TREE_CODE (arg1) == BIT_AND_EXPR 10001 && integer_onep (TREE_OPERAND (arg1, 1))))) 10002 || (truth_value_p (TREE_CODE (arg1)) 10003 && (truth_value_p (TREE_CODE (arg0)) 10004 || (TREE_CODE (arg0) == BIT_AND_EXPR 10005 && integer_onep (TREE_OPERAND (arg0, 1))))))) 10006 { 10007 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR 10008 : code == BIT_IOR_EXPR ? 
TRUTH_OR_EXPR 10009 : TRUTH_XOR_EXPR, 10010 boolean_type_node, 10011 fold_convert_loc (loc, boolean_type_node, arg0), 10012 fold_convert_loc (loc, boolean_type_node, arg1)); 10013 10014 if (code == EQ_EXPR) 10015 tem = invert_truthvalue_loc (loc, tem); 10016 10017 return fold_convert_loc (loc, type, tem); 10018 } 10019 10020 if (TREE_CODE_CLASS (code) == tcc_binary 10021 || TREE_CODE_CLASS (code) == tcc_comparison) 10022 { 10023 if (TREE_CODE (arg0) == COMPOUND_EXPR) 10024 { 10025 tem = fold_build2_loc (loc, code, type, 10026 fold_convert_loc (loc, TREE_TYPE (op0), 10027 TREE_OPERAND (arg0, 1)), op1); 10028 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), 10029 tem); 10030 } 10031 if (TREE_CODE (arg1) == COMPOUND_EXPR 10032 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 10033 { 10034 tem = fold_build2_loc (loc, code, type, op0, 10035 fold_convert_loc (loc, TREE_TYPE (op1), 10036 TREE_OPERAND (arg1, 1))); 10037 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), 10038 tem); 10039 } 10040 10041 if (TREE_CODE (arg0) == COND_EXPR 10042 || TREE_CODE (arg0) == VEC_COND_EXPR 10043 || COMPARISON_CLASS_P (arg0)) 10044 { 10045 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1, 10046 arg0, arg1, 10047 /*cond_first_p=*/1); 10048 if (tem != NULL_TREE) 10049 return tem; 10050 } 10051 10052 if (TREE_CODE (arg1) == COND_EXPR 10053 || TREE_CODE (arg1) == VEC_COND_EXPR 10054 || COMPARISON_CLASS_P (arg1)) 10055 { 10056 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1, 10057 arg1, arg0, 10058 /*cond_first_p=*/0); 10059 if (tem != NULL_TREE) 10060 return tem; 10061 } 10062 } 10063 10064 switch (code) 10065 { 10066 case MEM_REF: 10067 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */ 10068 if (TREE_CODE (arg0) == ADDR_EXPR 10069 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF) 10070 { 10071 tree iref = TREE_OPERAND (arg0, 0); 10072 return fold_build2 (MEM_REF, type, 10073 TREE_OPERAND (iref, 0), 10074 int_const_binop (PLUS_EXPR, arg1, 10075 TREE_OPERAND (iref, 1))); 10076 } 10077 10078 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */ 10079 if (TREE_CODE (arg0) == ADDR_EXPR 10080 && handled_component_p (TREE_OPERAND (arg0, 0))) 10081 { 10082 tree base; 10083 HOST_WIDE_INT coffset; 10084 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0), 10085 &coffset); 10086 if (!base) 10087 return NULL_TREE; 10088 return fold_build2 (MEM_REF, type, 10089 build_fold_addr_expr (base), 10090 int_const_binop (PLUS_EXPR, arg1, 10091 size_int (coffset))); 10092 } 10093 10094 return NULL_TREE; 10095 10096 case POINTER_PLUS_EXPR: 10097 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */ 10098 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 10099 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))) 10100 return fold_convert_loc (loc, type, 10101 fold_build2_loc (loc, PLUS_EXPR, sizetype, 10102 fold_convert_loc (loc, sizetype, 10103 arg1), 10104 fold_convert_loc (loc, sizetype, 10105 arg0))); 10106 10107 return NULL_TREE; 10108 10109 case PLUS_EXPR: 10110 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type)) 10111 { 10112 /* X + (X / CST) * -CST is X % CST. 
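             For example, X + (X / 4) * -4 folds to X % 4.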
*/ 10113 if (TREE_CODE (arg1) == MULT_EXPR 10114 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR 10115 && operand_equal_p (arg0, 10116 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)) 10117 { 10118 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1); 10119 tree cst1 = TREE_OPERAND (arg1, 1); 10120 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1), 10121 cst1, cst0); 10122 if (sum && integer_zerop (sum)) 10123 return fold_convert_loc (loc, type, 10124 fold_build2_loc (loc, TRUNC_MOD_EXPR, 10125 TREE_TYPE (arg0), arg0, 10126 cst0)); 10127 } 10128 } 10129 10130 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or 10131 one. Make sure the type is not saturating and has the signedness of 10132 the stripped operands, as fold_plusminus_mult_expr will re-associate. 10133 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */ 10134 if ((TREE_CODE (arg0) == MULT_EXPR 10135 || TREE_CODE (arg1) == MULT_EXPR) 10136 && !TYPE_SATURATING (type) 10137 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0)) 10138 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1)) 10139 && (!FLOAT_TYPE_P (type) || flag_associative_math)) 10140 { 10141 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1); 10142 if (tem) 10143 return tem; 10144 } 10145 10146 if (! FLOAT_TYPE_P (type)) 10147 { 10148 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing 10149 with a constant, and the two constants have no bits in common, 10150 we should treat this as a BIT_IOR_EXPR since this may produce more 10151 simplifications. */ 10152 if (TREE_CODE (arg0) == BIT_AND_EXPR 10153 && TREE_CODE (arg1) == BIT_AND_EXPR 10154 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 10155 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 10156 && wi::bit_and (TREE_OPERAND (arg0, 1), 10157 TREE_OPERAND (arg1, 1)) == 0) 10158 { 10159 code = BIT_IOR_EXPR; 10160 goto bit_ior; 10161 } 10162 10163 /* Reassociate (plus (plus (mult) (foo)) (mult)) as 10164 (plus (plus (mult) (mult)) (foo)) so that we can 10165 take advantage of the factoring cases below. */ 10166 if (ANY_INTEGRAL_TYPE_P (type) 10167 && TYPE_OVERFLOW_WRAPS (type) 10168 && (((TREE_CODE (arg0) == PLUS_EXPR 10169 || TREE_CODE (arg0) == MINUS_EXPR) 10170 && TREE_CODE (arg1) == MULT_EXPR) 10171 || ((TREE_CODE (arg1) == PLUS_EXPR 10172 || TREE_CODE (arg1) == MINUS_EXPR) 10173 && TREE_CODE (arg0) == MULT_EXPR))) 10174 { 10175 tree parg0, parg1, parg, marg; 10176 enum tree_code pcode; 10177 10178 if (TREE_CODE (arg1) == MULT_EXPR) 10179 parg = arg0, marg = arg1; 10180 else 10181 parg = arg1, marg = arg0; 10182 pcode = TREE_CODE (parg); 10183 parg0 = TREE_OPERAND (parg, 0); 10184 parg1 = TREE_OPERAND (parg, 1); 10185 STRIP_NOPS (parg0); 10186 STRIP_NOPS (parg1); 10187 10188 if (TREE_CODE (parg0) == MULT_EXPR 10189 && TREE_CODE (parg1) != MULT_EXPR) 10190 return fold_build2_loc (loc, pcode, type, 10191 fold_build2_loc (loc, PLUS_EXPR, type, 10192 fold_convert_loc (loc, type, 10193 parg0), 10194 fold_convert_loc (loc, type, 10195 marg)), 10196 fold_convert_loc (loc, type, parg1)); 10197 if (TREE_CODE (parg0) != MULT_EXPR 10198 && TREE_CODE (parg1) == MULT_EXPR) 10199 return 10200 fold_build2_loc (loc, PLUS_EXPR, type, 10201 fold_convert_loc (loc, type, parg0), 10202 fold_build2_loc (loc, pcode, type, 10203 fold_convert_loc (loc, type, marg), 10204 fold_convert_loc (loc, type, 10205 parg1))); 10206 } 10207 } 10208 else 10209 { 10210 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y ) 10211 to __complex__ ( x, y ). 
This is not the same for SNaNs or 10212 if signed zeros are involved. */ 10213 if (!HONOR_SNANS (element_mode (arg0)) 10214 && !HONOR_SIGNED_ZEROS (element_mode (arg0)) 10215 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) 10216 { 10217 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 10218 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0); 10219 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0); 10220 bool arg0rz = false, arg0iz = false; 10221 if ((arg0r && (arg0rz = real_zerop (arg0r))) 10222 || (arg0i && (arg0iz = real_zerop (arg0i)))) 10223 { 10224 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1); 10225 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1); 10226 if (arg0rz && arg1i && real_zerop (arg1i)) 10227 { 10228 tree rp = arg1r ? arg1r 10229 : build1 (REALPART_EXPR, rtype, arg1); 10230 tree ip = arg0i ? arg0i 10231 : build1 (IMAGPART_EXPR, rtype, arg0); 10232 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10233 } 10234 else if (arg0iz && arg1r && real_zerop (arg1r)) 10235 { 10236 tree rp = arg0r ? arg0r 10237 : build1 (REALPART_EXPR, rtype, arg0); 10238 tree ip = arg1i ? arg1i 10239 : build1 (IMAGPART_EXPR, rtype, arg1); 10240 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10241 } 10242 } 10243 } 10244 10245 if (flag_unsafe_math_optimizations 10246 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) 10247 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) 10248 && (tem = distribute_real_division (loc, code, type, arg0, arg1))) 10249 return tem; 10250 10251 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. 10252 We associate floats only if the user has specified 10253 -fassociative-math. */ 10254 if (flag_associative_math 10255 && TREE_CODE (arg1) == PLUS_EXPR 10256 && TREE_CODE (arg0) != MULT_EXPR) 10257 { 10258 tree tree10 = TREE_OPERAND (arg1, 0); 10259 tree tree11 = TREE_OPERAND (arg1, 1); 10260 if (TREE_CODE (tree11) == MULT_EXPR 10261 && TREE_CODE (tree10) == MULT_EXPR) 10262 { 10263 tree tree0; 10264 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10); 10265 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11); 10266 } 10267 } 10268 /* Convert (b*c + d*e) + a into b*c + (d*e +a). 10269 We associate floats only if the user has specified 10270 -fassociative-math. */ 10271 if (flag_associative_math 10272 && TREE_CODE (arg0) == PLUS_EXPR 10273 && TREE_CODE (arg1) != MULT_EXPR) 10274 { 10275 tree tree00 = TREE_OPERAND (arg0, 0); 10276 tree tree01 = TREE_OPERAND (arg0, 1); 10277 if (TREE_CODE (tree01) == MULT_EXPR 10278 && TREE_CODE (tree00) == MULT_EXPR) 10279 { 10280 tree tree0; 10281 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1); 10282 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0); 10283 } 10284 } 10285 } 10286 10287 bit_rotate: 10288 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A 10289 is a rotate of A by C1 bits. */ 10290 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A 10291 is a rotate of A by B bits. */ 10292 { 10293 enum tree_code code0, code1; 10294 tree rtype; 10295 code0 = TREE_CODE (arg0); 10296 code1 = TREE_CODE (arg1); 10297 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR) 10298 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR)) 10299 && operand_equal_p (TREE_OPERAND (arg0, 0), 10300 TREE_OPERAND (arg1, 0), 0) 10301 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)), 10302 TYPE_UNSIGNED (rtype)) 10303 /* Only create rotates in complete modes. Other cases are not 10304 expanded properly. 
*/ 10305 && (element_precision (rtype) 10306 == element_precision (TYPE_MODE (rtype)))) 10307 { 10308 tree tree01, tree11; 10309 enum tree_code code01, code11; 10310 10311 tree01 = TREE_OPERAND (arg0, 1); 10312 tree11 = TREE_OPERAND (arg1, 1); 10313 STRIP_NOPS (tree01); 10314 STRIP_NOPS (tree11); 10315 code01 = TREE_CODE (tree01); 10316 code11 = TREE_CODE (tree11); 10317 if (code01 == INTEGER_CST 10318 && code11 == INTEGER_CST 10319 && (wi::to_widest (tree01) + wi::to_widest (tree11) 10320 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0))))) 10321 { 10322 tem = build2_loc (loc, LROTATE_EXPR, 10323 TREE_TYPE (TREE_OPERAND (arg0, 0)), 10324 TREE_OPERAND (arg0, 0), 10325 code0 == LSHIFT_EXPR 10326 ? TREE_OPERAND (arg0, 1) 10327 : TREE_OPERAND (arg1, 1)); 10328 return fold_convert_loc (loc, type, tem); 10329 } 10330 else if (code11 == MINUS_EXPR) 10331 { 10332 tree tree110, tree111; 10333 tree110 = TREE_OPERAND (tree11, 0); 10334 tree111 = TREE_OPERAND (tree11, 1); 10335 STRIP_NOPS (tree110); 10336 STRIP_NOPS (tree111); 10337 if (TREE_CODE (tree110) == INTEGER_CST 10338 && 0 == compare_tree_int (tree110, 10339 element_precision 10340 (TREE_TYPE (TREE_OPERAND 10341 (arg0, 0)))) 10342 && operand_equal_p (tree01, tree111, 0)) 10343 return 10344 fold_convert_loc (loc, type, 10345 build2 ((code0 == LSHIFT_EXPR 10346 ? LROTATE_EXPR 10347 : RROTATE_EXPR), 10348 TREE_TYPE (TREE_OPERAND (arg0, 0)), 10349 TREE_OPERAND (arg0, 0), 10350 TREE_OPERAND (arg0, 1))); 10351 } 10352 else if (code01 == MINUS_EXPR) 10353 { 10354 tree tree010, tree011; 10355 tree010 = TREE_OPERAND (tree01, 0); 10356 tree011 = TREE_OPERAND (tree01, 1); 10357 STRIP_NOPS (tree010); 10358 STRIP_NOPS (tree011); 10359 if (TREE_CODE (tree010) == INTEGER_CST 10360 && 0 == compare_tree_int (tree010, 10361 element_precision 10362 (TREE_TYPE (TREE_OPERAND 10363 (arg0, 0)))) 10364 && operand_equal_p (tree11, tree011, 0)) 10365 return fold_convert_loc 10366 (loc, type, 10367 build2 ((code0 != LSHIFT_EXPR 10368 ? LROTATE_EXPR 10369 : RROTATE_EXPR), 10370 TREE_TYPE (TREE_OPERAND (arg0, 0)), 10371 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1))); 10372 } 10373 } 10374 } 10375 10376 associate: 10377 /* In most languages, can't associate operations on floats through 10378 parentheses. Rather than remember where the parentheses were, we 10379 don't associate floats at all, unless the user has specified 10380 -fassociative-math. 10381 And, we need to make sure type is not saturating. */ 10382 10383 if ((! FLOAT_TYPE_P (type) || flag_associative_math) 10384 && !TYPE_SATURATING (type)) 10385 { 10386 tree var0, con0, lit0, minus_lit0; 10387 tree var1, con1, lit1, minus_lit1; 10388 tree atype = type; 10389 bool ok = true; 10390 10391 /* Split both trees into variables, constants, and literals. Then 10392 associate each group together, the constants with literals, 10393 then the result with variables. This increases the chances of 10394 literals being recombined later and of generating relocatable 10395 expressions for the sum of a constant and literal. */ 10396 var0 = split_tree (loc, arg0, type, code, 10397 &con0, &lit0, &minus_lit0, 0); 10398 var1 = split_tree (loc, arg1, type, code, 10399 &con1, &lit1, &minus_lit1, code == MINUS_EXPR); 10400 10401 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */ 10402 if (code == MINUS_EXPR) 10403 code = PLUS_EXPR; 10404 10405 /* With undefined overflow prefer doing association in a type 10406 which wraps on overflow, if that is one of the operand types. 
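         For instance, when the result type is signed int but arg0 is
         unsigned int of the same precision, the partial sums are formed
         in the unsigned type.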
*/ 10407 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED) 10408 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type))) 10409 { 10410 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 10411 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))) 10412 atype = TREE_TYPE (arg0); 10413 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 10414 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))) 10415 atype = TREE_TYPE (arg1); 10416 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type)); 10417 } 10418 10419 /* With undefined overflow we can only associate constants with one 10420 variable, and constants whose association doesn't overflow. */ 10421 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED) 10422 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype))) 10423 { 10424 if (var0 && var1) 10425 { 10426 tree tmp0 = var0; 10427 tree tmp1 = var1; 10428 bool one_neg = false; 10429 10430 if (TREE_CODE (tmp0) == NEGATE_EXPR) 10431 { 10432 tmp0 = TREE_OPERAND (tmp0, 0); 10433 one_neg = !one_neg; 10434 } 10435 if (CONVERT_EXPR_P (tmp0) 10436 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0))) 10437 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0))) 10438 <= TYPE_PRECISION (atype))) 10439 tmp0 = TREE_OPERAND (tmp0, 0); 10440 if (TREE_CODE (tmp1) == NEGATE_EXPR) 10441 { 10442 tmp1 = TREE_OPERAND (tmp1, 0); 10443 one_neg = !one_neg; 10444 } 10445 if (CONVERT_EXPR_P (tmp1) 10446 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0))) 10447 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0))) 10448 <= TYPE_PRECISION (atype))) 10449 tmp1 = TREE_OPERAND (tmp1, 0); 10450 /* The only case we can still associate with two variables 10451 is if they cancel out. */ 10452 if (!one_neg 10453 || !operand_equal_p (tmp0, tmp1, 0)) 10454 ok = false; 10455 } 10456 } 10457 10458 /* Only do something if we found more than two objects. Otherwise, 10459 nothing has changed and we risk infinite recursion. */ 10460 if (ok 10461 && (2 < ((var0 != 0) + (var1 != 0) 10462 + (con0 != 0) + (con1 != 0) 10463 + (lit0 != 0) + (lit1 != 0) 10464 + (minus_lit0 != 0) + (minus_lit1 != 0)))) 10465 { 10466 bool any_overflows = false; 10467 if (lit0) any_overflows |= TREE_OVERFLOW (lit0); 10468 if (lit1) any_overflows |= TREE_OVERFLOW (lit1); 10469 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0); 10470 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1); 10471 var0 = associate_trees (loc, var0, var1, code, atype); 10472 con0 = associate_trees (loc, con0, con1, code, atype); 10473 lit0 = associate_trees (loc, lit0, lit1, code, atype); 10474 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, 10475 code, atype); 10476 10477 /* Preserve the MINUS_EXPR if the negative part of the literal is 10478 greater than the positive part. Otherwise, the multiplicative 10479 folding code (i.e extract_muldiv) may be fooled in case 10480 unsigned constants are subtracted, like in the following 10481 example: ((X*2 + 4) - 8U)/2. */ 10482 if (minus_lit0 && lit0) 10483 { 10484 if (TREE_CODE (lit0) == INTEGER_CST 10485 && TREE_CODE (minus_lit0) == INTEGER_CST 10486 && tree_int_cst_lt (lit0, minus_lit0)) 10487 { 10488 minus_lit0 = associate_trees (loc, minus_lit0, lit0, 10489 MINUS_EXPR, atype); 10490 lit0 = 0; 10491 } 10492 else 10493 { 10494 lit0 = associate_trees (loc, lit0, minus_lit0, 10495 MINUS_EXPR, atype); 10496 minus_lit0 = 0; 10497 } 10498 } 10499 10500 /* Don't introduce overflows through reassociation. 
*/ 10501 if (!any_overflows 10502 && ((lit0 && TREE_OVERFLOW_P (lit0)) 10503 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))) 10504 return NULL_TREE; 10505 10506 if (minus_lit0) 10507 { 10508 if (con0 == 0) 10509 return 10510 fold_convert_loc (loc, type, 10511 associate_trees (loc, var0, minus_lit0, 10512 MINUS_EXPR, atype)); 10513 else 10514 { 10515 con0 = associate_trees (loc, con0, minus_lit0, 10516 MINUS_EXPR, atype); 10517 return 10518 fold_convert_loc (loc, type, 10519 associate_trees (loc, var0, con0, 10520 PLUS_EXPR, atype)); 10521 } 10522 } 10523 10524 con0 = associate_trees (loc, con0, lit0, code, atype); 10525 return 10526 fold_convert_loc (loc, type, associate_trees (loc, var0, con0, 10527 code, atype)); 10528 } 10529 } 10530 10531 return NULL_TREE; 10532 10533 case MINUS_EXPR: 10534 /* Pointer simplifications for subtraction, simple reassociations. */ 10535 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0))) 10536 { 10537 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */ 10538 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR 10539 && TREE_CODE (arg1) == POINTER_PLUS_EXPR) 10540 { 10541 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 10542 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 10543 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 10544 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 10545 return fold_build2_loc (loc, PLUS_EXPR, type, 10546 fold_build2_loc (loc, MINUS_EXPR, type, 10547 arg00, arg10), 10548 fold_build2_loc (loc, MINUS_EXPR, type, 10549 arg01, arg11)); 10550 } 10551 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */ 10552 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) 10553 { 10554 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 10555 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 10556 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00, 10557 fold_convert_loc (loc, type, arg1)); 10558 if (tmp) 10559 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01); 10560 } 10561 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1 10562 simplifies. */ 10563 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR) 10564 { 10565 tree arg10 = fold_convert_loc (loc, type, 10566 TREE_OPERAND (arg1, 0)); 10567 tree arg11 = fold_convert_loc (loc, type, 10568 TREE_OPERAND (arg1, 1)); 10569 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, 10570 fold_convert_loc (loc, type, arg0), 10571 arg10); 10572 if (tmp) 10573 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11); 10574 } 10575 } 10576 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */ 10577 if (TREE_CODE (arg0) == NEGATE_EXPR 10578 && negate_expr_p (arg1) 10579 && reorder_operands_p (arg0, arg1)) 10580 return fold_build2_loc (loc, MINUS_EXPR, type, 10581 fold_convert_loc (loc, type, 10582 negate_expr (arg1)), 10583 fold_convert_loc (loc, type, 10584 TREE_OPERAND (arg0, 0))); 10585 10586 /* X - (X / Y) * Y is X % Y. 
*/ 10587 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type)) 10588 && TREE_CODE (arg1) == MULT_EXPR 10589 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR 10590 && operand_equal_p (arg0, 10591 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0) 10592 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1), 10593 TREE_OPERAND (arg1, 1), 0)) 10594 return 10595 fold_convert_loc (loc, type, 10596 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0), 10597 arg0, TREE_OPERAND (arg1, 1))); 10598 10599 if (! FLOAT_TYPE_P (type)) 10600 { 10601 /* Fold A - (A & B) into ~B & A. */ 10602 if (!TREE_SIDE_EFFECTS (arg0) 10603 && TREE_CODE (arg1) == BIT_AND_EXPR) 10604 { 10605 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)) 10606 { 10607 tree arg10 = fold_convert_loc (loc, type, 10608 TREE_OPERAND (arg1, 0)); 10609 return fold_build2_loc (loc, BIT_AND_EXPR, type, 10610 fold_build1_loc (loc, BIT_NOT_EXPR, 10611 type, arg10), 10612 fold_convert_loc (loc, type, arg0)); 10613 } 10614 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10615 { 10616 tree arg11 = fold_convert_loc (loc, 10617 type, TREE_OPERAND (arg1, 1)); 10618 return fold_build2_loc (loc, BIT_AND_EXPR, type, 10619 fold_build1_loc (loc, BIT_NOT_EXPR, 10620 type, arg11), 10621 fold_convert_loc (loc, type, arg0)); 10622 } 10623 } 10624 10625 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is 10626 any power of 2 minus 1. */ 10627 if (TREE_CODE (arg0) == BIT_AND_EXPR 10628 && TREE_CODE (arg1) == BIT_AND_EXPR 10629 && operand_equal_p (TREE_OPERAND (arg0, 0), 10630 TREE_OPERAND (arg1, 0), 0)) 10631 { 10632 tree mask0 = TREE_OPERAND (arg0, 1); 10633 tree mask1 = TREE_OPERAND (arg1, 1); 10634 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0); 10635 10636 if (operand_equal_p (tem, mask1, 0)) 10637 { 10638 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type, 10639 TREE_OPERAND (arg0, 0), mask1); 10640 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1); 10641 } 10642 } 10643 } 10644 10645 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to 10646 __complex__ ( x, -y ). This is not the same for SNaNs or if 10647 signed zeros are involved. */ 10648 if (!HONOR_SNANS (element_mode (arg0)) 10649 && !HONOR_SIGNED_ZEROS (element_mode (arg0)) 10650 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) 10651 { 10652 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 10653 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0); 10654 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0); 10655 bool arg0rz = false, arg0iz = false; 10656 if ((arg0r && (arg0rz = real_zerop (arg0r))) 10657 || (arg0i && (arg0iz = real_zerop (arg0i)))) 10658 { 10659 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1); 10660 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1); 10661 if (arg0rz && arg1i && real_zerop (arg1i)) 10662 { 10663 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype, 10664 arg1r ? arg1r 10665 : build1 (REALPART_EXPR, rtype, arg1)); 10666 tree ip = arg0i ? arg0i 10667 : build1 (IMAGPART_EXPR, rtype, arg0); 10668 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10669 } 10670 else if (arg0iz && arg1r && real_zerop (arg1r)) 10671 { 10672 tree rp = arg0r ? arg0r 10673 : build1 (REALPART_EXPR, rtype, arg0); 10674 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype, 10675 arg1i ? arg1i 10676 : build1 (IMAGPART_EXPR, rtype, arg1)); 10677 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10678 } 10679 } 10680 } 10681 10682 /* A - B -> A + (-B) if B is easily negatable. 
*/ 10683 if (negate_expr_p (arg1) 10684 && !TYPE_OVERFLOW_SANITIZED (type) 10685 && ((FLOAT_TYPE_P (type) 10686 /* Avoid this transformation if B is a positive REAL_CST. */ 10687 && (TREE_CODE (arg1) != REAL_CST 10688 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))) 10689 || INTEGRAL_TYPE_P (type))) 10690 return fold_build2_loc (loc, PLUS_EXPR, type, 10691 fold_convert_loc (loc, type, arg0), 10692 fold_convert_loc (loc, type, 10693 negate_expr (arg1))); 10694 10695 /* Try folding difference of addresses. */ 10696 { 10697 HOST_WIDE_INT diff; 10698 10699 if ((TREE_CODE (arg0) == ADDR_EXPR 10700 || TREE_CODE (arg1) == ADDR_EXPR) 10701 && ptr_difference_const (arg0, arg1, &diff)) 10702 return build_int_cst_type (type, diff); 10703 } 10704 10705 /* Fold &a[i] - &a[j] to i-j. */ 10706 if (TREE_CODE (arg0) == ADDR_EXPR 10707 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF 10708 && TREE_CODE (arg1) == ADDR_EXPR 10709 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF) 10710 { 10711 tree tem = fold_addr_of_array_ref_difference (loc, type, 10712 TREE_OPERAND (arg0, 0), 10713 TREE_OPERAND (arg1, 0)); 10714 if (tem) 10715 return tem; 10716 } 10717 10718 if (FLOAT_TYPE_P (type) 10719 && flag_unsafe_math_optimizations 10720 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) 10721 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) 10722 && (tem = distribute_real_division (loc, code, type, arg0, arg1))) 10723 return tem; 10724 10725 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or 10726 one. Make sure the type is not saturating and has the signedness of 10727 the stripped operands, as fold_plusminus_mult_expr will re-associate. 10728 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */ 10729 if ((TREE_CODE (arg0) == MULT_EXPR 10730 || TREE_CODE (arg1) == MULT_EXPR) 10731 && !TYPE_SATURATING (type) 10732 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0)) 10733 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1)) 10734 && (!FLOAT_TYPE_P (type) || flag_associative_math)) 10735 { 10736 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1); 10737 if (tem) 10738 return tem; 10739 } 10740 10741 goto associate; 10742 10743 case MULT_EXPR: 10744 /* (-A) * (-B) -> A * B */ 10745 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 10746 return fold_build2_loc (loc, MULT_EXPR, type, 10747 fold_convert_loc (loc, type, 10748 TREE_OPERAND (arg0, 0)), 10749 fold_convert_loc (loc, type, 10750 negate_expr (arg1))); 10751 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 10752 return fold_build2_loc (loc, MULT_EXPR, type, 10753 fold_convert_loc (loc, type, 10754 negate_expr (arg0)), 10755 fold_convert_loc (loc, type, 10756 TREE_OPERAND (arg1, 0))); 10757 10758 if (! FLOAT_TYPE_P (type)) 10759 { 10760 /* Transform x * -C into -x * C if x is easily negatable. 
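For instance, (-y) * -5 becomes y * 5: negating x lets the two sign changes cancel while the constant becomes positive.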
*/ 10761 if (TREE_CODE (arg1) == INTEGER_CST
10762 && tree_int_cst_sgn (arg1) == -1
10763 && negate_expr_p (arg0)
10764 && (tem = negate_expr (arg1)) != arg1
10765 && !TREE_OVERFLOW (tem))
10766 return fold_build2_loc (loc, MULT_EXPR, type,
10767 fold_convert_loc (loc, type,
10768 negate_expr (arg0)),
10769 tem);
10770
10771 /* (a * (1 << b)) is (a << b) */
10772 if (TREE_CODE (arg1) == LSHIFT_EXPR
10773 && integer_onep (TREE_OPERAND (arg1, 0)))
10774 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10775 TREE_OPERAND (arg1, 1));
10776 if (TREE_CODE (arg0) == LSHIFT_EXPR
10777 && integer_onep (TREE_OPERAND (arg0, 0)))
10778 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10779 TREE_OPERAND (arg0, 1));
10780
10781 /* (A + A) * C -> A * 2 * C */
10782 if (TREE_CODE (arg0) == PLUS_EXPR
10783 && TREE_CODE (arg1) == INTEGER_CST
10784 && operand_equal_p (TREE_OPERAND (arg0, 0),
10785 TREE_OPERAND (arg0, 1), 0))
10786 return fold_build2_loc (loc, MULT_EXPR, type,
10787 omit_one_operand_loc (loc, type,
10788 TREE_OPERAND (arg0, 0),
10789 TREE_OPERAND (arg0, 1)),
10790 fold_build2_loc (loc, MULT_EXPR, type,
10791 build_int_cst (type, 2), arg1));
10792
10793 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10794 sign-changing only. */
10795 if (TREE_CODE (arg1) == INTEGER_CST
10796 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10797 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10798 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10799
10800 strict_overflow_p = false;
10801 if (TREE_CODE (arg1) == INTEGER_CST
10802 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10803 &strict_overflow_p)))
10804 {
10805 if (strict_overflow_p)
10806 fold_overflow_warning (("assuming signed overflow does not "
10807 "occur when simplifying "
10808 "multiplication"),
10809 WARN_STRICT_OVERFLOW_MISC);
10810 return fold_convert_loc (loc, type, tem);
10811 }
10812
10813 /* Optimize z * conj(z) for integer complex numbers. */
10814 if (TREE_CODE (arg0) == CONJ_EXPR
10815 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10816 return fold_mult_zconjz (loc, type, arg1);
10817 if (TREE_CODE (arg1) == CONJ_EXPR
10818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10819 return fold_mult_zconjz (loc, type, arg0);
10820 }
10821 else
10822 {
10823 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10824 the result for floating point types due to rounding so it is applied
10825 only if -fassociative-math was specified. */
10826 if (flag_associative_math
10827 && TREE_CODE (arg0) == RDIV_EXPR
10828 && TREE_CODE (arg1) == REAL_CST
10829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10830 {
10831 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10832 arg1);
10833 if (tem)
10834 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10835 TREE_OPERAND (arg0, 1));
10836 }
10837
10838 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10839 if (operand_equal_p (arg0, arg1, 0))
10840 {
10841 tree tem = fold_strip_sign_ops (arg0);
10842 if (tem != NULL_TREE)
10843 {
10844 tem = fold_convert_loc (loc, type, tem);
10845 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10846 }
10847 }
10848
10849 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10850 This is not the same for NaNs or if signed zeros are
10851 involved.
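For instance, (a + bi) * I is -b + ai and (a + bi) * -I is b - ai, which is what the two branches below build.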
*/ 10852 if (!HONOR_NANS (arg0) 10853 && !HONOR_SIGNED_ZEROS (element_mode (arg0)) 10854 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)) 10855 && TREE_CODE (arg1) == COMPLEX_CST 10856 && real_zerop (TREE_REALPART (arg1))) 10857 { 10858 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 10859 if (real_onep (TREE_IMAGPART (arg1))) 10860 return 10861 fold_build2_loc (loc, COMPLEX_EXPR, type, 10862 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR, 10863 rtype, arg0)), 10864 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0)); 10865 else if (real_minus_onep (TREE_IMAGPART (arg1))) 10866 return 10867 fold_build2_loc (loc, COMPLEX_EXPR, type, 10868 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0), 10869 negate_expr (fold_build1_loc (loc, REALPART_EXPR, 10870 rtype, arg0))); 10871 } 10872 10873 /* Optimize z * conj(z) for floating point complex numbers. 10874 Guarded by flag_unsafe_math_optimizations as non-finite 10875 imaginary components don't produce scalar results. */ 10876 if (flag_unsafe_math_optimizations 10877 && TREE_CODE (arg0) == CONJ_EXPR 10878 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 10879 return fold_mult_zconjz (loc, type, arg1); 10880 if (flag_unsafe_math_optimizations 10881 && TREE_CODE (arg1) == CONJ_EXPR 10882 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10883 return fold_mult_zconjz (loc, type, arg0); 10884 10885 if (flag_unsafe_math_optimizations) 10886 { 10887 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 10888 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 10889 10890 /* Optimizations of root(...)*root(...). */ 10891 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0)) 10892 { 10893 tree rootfn, arg; 10894 tree arg00 = CALL_EXPR_ARG (arg0, 0); 10895 tree arg10 = CALL_EXPR_ARG (arg1, 0); 10896 10897 /* Optimize sqrt(x)*sqrt(x) as x. */ 10898 if (BUILTIN_SQRT_P (fcode0) 10899 && operand_equal_p (arg00, arg10, 0) 10900 && ! HONOR_SNANS (element_mode (type))) 10901 return arg00; 10902 10903 /* Optimize root(x)*root(y) as root(x*y). */ 10904 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10905 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10); 10906 return build_call_expr_loc (loc, rootfn, 1, arg); 10907 } 10908 10909 /* Optimize expN(x)*expN(y) as expN(x+y). */ 10910 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0)) 10911 { 10912 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10913 tree arg = fold_build2_loc (loc, PLUS_EXPR, type, 10914 CALL_EXPR_ARG (arg0, 0), 10915 CALL_EXPR_ARG (arg1, 0)); 10916 return build_call_expr_loc (loc, expfn, 1, arg); 10917 } 10918 10919 /* Optimizations of pow(...)*pow(...). */ 10920 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW) 10921 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF) 10922 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL)) 10923 { 10924 tree arg00 = CALL_EXPR_ARG (arg0, 0); 10925 tree arg01 = CALL_EXPR_ARG (arg0, 1); 10926 tree arg10 = CALL_EXPR_ARG (arg1, 0); 10927 tree arg11 = CALL_EXPR_ARG (arg1, 1); 10928 10929 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */ 10930 if (operand_equal_p (arg01, arg11, 0)) 10931 { 10932 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10933 tree arg = fold_build2_loc (loc, MULT_EXPR, type, 10934 arg00, arg10); 10935 return build_call_expr_loc (loc, powfn, 2, arg, arg01); 10936 } 10937 10938 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). 
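For example, pow (x, 2.5) * pow (x, 0.5) becomes pow (x, 3.0).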
*/ 10939 if (operand_equal_p (arg00, arg10, 0)) 10940 { 10941 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10942 tree arg = fold_build2_loc (loc, PLUS_EXPR, type, 10943 arg01, arg11); 10944 return build_call_expr_loc (loc, powfn, 2, arg00, arg); 10945 } 10946 } 10947 10948 /* Optimize tan(x)*cos(x) as sin(x). */ 10949 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS) 10950 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF) 10951 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL) 10952 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN) 10953 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF) 10954 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL)) 10955 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 10956 CALL_EXPR_ARG (arg1, 0), 0)) 10957 { 10958 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN); 10959 10960 if (sinfn != NULL_TREE) 10961 return build_call_expr_loc (loc, sinfn, 1, 10962 CALL_EXPR_ARG (arg0, 0)); 10963 } 10964 10965 /* Optimize x*pow(x,c) as pow(x,c+1). */ 10966 if (fcode1 == BUILT_IN_POW 10967 || fcode1 == BUILT_IN_POWF 10968 || fcode1 == BUILT_IN_POWL) 10969 { 10970 tree arg10 = CALL_EXPR_ARG (arg1, 0); 10971 tree arg11 = CALL_EXPR_ARG (arg1, 1); 10972 if (TREE_CODE (arg11) == REAL_CST 10973 && !TREE_OVERFLOW (arg11) 10974 && operand_equal_p (arg0, arg10, 0)) 10975 { 10976 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 10977 REAL_VALUE_TYPE c; 10978 tree arg; 10979 10980 c = TREE_REAL_CST (arg11); 10981 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 10982 arg = build_real (type, c); 10983 return build_call_expr_loc (loc, powfn, 2, arg0, arg); 10984 } 10985 } 10986 10987 /* Optimize pow(x,c)*x as pow(x,c+1). */ 10988 if (fcode0 == BUILT_IN_POW 10989 || fcode0 == BUILT_IN_POWF 10990 || fcode0 == BUILT_IN_POWL) 10991 { 10992 tree arg00 = CALL_EXPR_ARG (arg0, 0); 10993 tree arg01 = CALL_EXPR_ARG (arg0, 1); 10994 if (TREE_CODE (arg01) == REAL_CST 10995 && !TREE_OVERFLOW (arg01) 10996 && operand_equal_p (arg1, arg00, 0)) 10997 { 10998 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10999 REAL_VALUE_TYPE c; 11000 tree arg; 11001 11002 c = TREE_REAL_CST (arg01); 11003 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 11004 arg = build_real (type, c); 11005 return build_call_expr_loc (loc, powfn, 2, arg1, arg); 11006 } 11007 } 11008 11009 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */ 11010 if (!in_gimple_form 11011 && optimize 11012 && operand_equal_p (arg0, arg1, 0)) 11013 { 11014 tree powfn = mathfn_built_in (type, BUILT_IN_POW); 11015 11016 if (powfn) 11017 { 11018 tree arg = build_real (type, dconst2); 11019 return build_call_expr_loc (loc, powfn, 2, arg0, arg); 11020 } 11021 } 11022 } 11023 } 11024 goto associate; 11025 11026 case BIT_IOR_EXPR: 11027 bit_ior: 11028 /* ~X | X is -1. */ 11029 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11030 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11031 { 11032 t1 = build_zero_cst (type); 11033 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11034 return omit_one_operand_loc (loc, type, t1, arg1); 11035 } 11036 11037 /* X | ~X is -1. */ 11038 if (TREE_CODE (arg1) == BIT_NOT_EXPR 11039 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11040 { 11041 t1 = build_zero_cst (type); 11042 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11043 return omit_one_operand_loc (loc, type, t1, arg0); 11044 } 11045 11046 /* Canonicalize (X & C1) | C2. 
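For example, with an 8-bit type, (X & 0x0f) | 0x0f reduces to just 0x0f because C2 already covers C1, while (X & 0xf0) | 0x0f becomes X | 0x0f because C1 | C2 covers every bit of the mode.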
*/ 11047 if (TREE_CODE (arg0) == BIT_AND_EXPR 11048 && TREE_CODE (arg1) == INTEGER_CST 11049 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 11050 { 11051 int width = TYPE_PRECISION (type), w; 11052 wide_int c1 = TREE_OPERAND (arg0, 1); 11053 wide_int c2 = arg1; 11054 11055 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */ 11056 if ((c1 & c2) == c1) 11057 return omit_one_operand_loc (loc, type, arg1, 11058 TREE_OPERAND (arg0, 0)); 11059 11060 wide_int msk = wi::mask (width, false, 11061 TYPE_PRECISION (TREE_TYPE (arg1))); 11062 11063 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */ 11064 if (msk.and_not (c1 | c2) == 0) 11065 return fold_build2_loc (loc, BIT_IOR_EXPR, type, 11066 TREE_OPERAND (arg0, 0), arg1); 11067 11068 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2, 11069 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some 11070 mode which allows further optimizations. */ 11071 c1 &= msk; 11072 c2 &= msk; 11073 wide_int c3 = c1.and_not (c2); 11074 for (w = BITS_PER_UNIT; w <= width; w <<= 1) 11075 { 11076 wide_int mask = wi::mask (w, false, 11077 TYPE_PRECISION (type)); 11078 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0) 11079 { 11080 c3 = mask; 11081 break; 11082 } 11083 } 11084 11085 if (c3 != c1) 11086 return fold_build2_loc (loc, BIT_IOR_EXPR, type, 11087 fold_build2_loc (loc, BIT_AND_EXPR, type, 11088 TREE_OPERAND (arg0, 0), 11089 wide_int_to_tree (type, 11090 c3)), 11091 arg1); 11092 } 11093 11094 /* (X & ~Y) | (~X & Y) is X ^ Y */ 11095 if (TREE_CODE (arg0) == BIT_AND_EXPR 11096 && TREE_CODE (arg1) == BIT_AND_EXPR) 11097 { 11098 tree a0, a1, l0, l1, n0, n1; 11099 11100 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11101 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11102 11103 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11104 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11105 11106 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0); 11107 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1); 11108 11109 if ((operand_equal_p (n0, a0, 0) 11110 && operand_equal_p (n1, a1, 0)) 11111 || (operand_equal_p (n0, a1, 0) 11112 && operand_equal_p (n1, a0, 0))) 11113 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1); 11114 } 11115 11116 t1 = distribute_bit_expr (loc, code, type, arg0, arg1); 11117 if (t1 != NULL_TREE) 11118 return t1; 11119 11120 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))). 11121 11122 This results in more efficient code for machines without a NAND 11123 instruction. Combine will canonicalize to the first form 11124 which will allow use of NAND instructions provided by the 11125 backend if they exist. */ 11126 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11127 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11128 { 11129 return 11130 fold_build1_loc (loc, BIT_NOT_EXPR, type, 11131 build2 (BIT_AND_EXPR, type, 11132 fold_convert_loc (loc, type, 11133 TREE_OPERAND (arg0, 0)), 11134 fold_convert_loc (loc, type, 11135 TREE_OPERAND (arg1, 0)))); 11136 } 11137 11138 /* See if this can be simplified into a rotate first. If that 11139 is unsuccessful continue in the association code. */ 11140 goto bit_rotate; 11141 11142 case BIT_XOR_EXPR: 11143 /* ~X ^ X is -1. */ 11144 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11145 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11146 { 11147 t1 = build_zero_cst (type); 11148 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11149 return omit_one_operand_loc (loc, type, t1, arg1); 11150 } 11151 11152 /* X ^ ~X is -1. 
*/ 11153 if (TREE_CODE (arg1) == BIT_NOT_EXPR 11154 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11155 { 11156 t1 = build_zero_cst (type); 11157 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11158 return omit_one_operand_loc (loc, type, t1, arg0); 11159 } 11160 11161 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing 11162 with a constant, and the two constants have no bits in common, 11163 we should treat this as a BIT_IOR_EXPR since this may produce more 11164 simplifications. */ 11165 if (TREE_CODE (arg0) == BIT_AND_EXPR 11166 && TREE_CODE (arg1) == BIT_AND_EXPR 11167 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 11168 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 11169 && wi::bit_and (TREE_OPERAND (arg0, 1), 11170 TREE_OPERAND (arg1, 1)) == 0) 11171 { 11172 code = BIT_IOR_EXPR; 11173 goto bit_ior; 11174 } 11175 11176 /* (X | Y) ^ X -> Y & ~ X*/ 11177 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11178 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11179 { 11180 tree t2 = TREE_OPERAND (arg0, 1); 11181 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), 11182 arg1); 11183 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11184 fold_convert_loc (loc, type, t2), 11185 fold_convert_loc (loc, type, t1)); 11186 return t1; 11187 } 11188 11189 /* (Y | X) ^ X -> Y & ~ X*/ 11190 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11191 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11192 { 11193 tree t2 = TREE_OPERAND (arg0, 0); 11194 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), 11195 arg1); 11196 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11197 fold_convert_loc (loc, type, t2), 11198 fold_convert_loc (loc, type, t1)); 11199 return t1; 11200 } 11201 11202 /* X ^ (X | Y) -> Y & ~ X*/ 11203 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11204 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0)) 11205 { 11206 tree t2 = TREE_OPERAND (arg1, 1); 11207 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), 11208 arg0); 11209 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11210 fold_convert_loc (loc, type, t2), 11211 fold_convert_loc (loc, type, t1)); 11212 return t1; 11213 } 11214 11215 /* X ^ (Y | X) -> Y & ~ X*/ 11216 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11217 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0)) 11218 { 11219 tree t2 = TREE_OPERAND (arg1, 0); 11220 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), 11221 arg0); 11222 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11223 fold_convert_loc (loc, type, t2), 11224 fold_convert_loc (loc, type, t1)); 11225 return t1; 11226 } 11227 11228 /* Convert ~X ^ ~Y to X ^ Y. */ 11229 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11230 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11231 return fold_build2_loc (loc, code, type, 11232 fold_convert_loc (loc, type, 11233 TREE_OPERAND (arg0, 0)), 11234 fold_convert_loc (loc, type, 11235 TREE_OPERAND (arg1, 0))); 11236 11237 /* Convert ~X ^ C to X ^ ~C. */ 11238 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11239 && TREE_CODE (arg1) == INTEGER_CST) 11240 return fold_build2_loc (loc, code, type, 11241 fold_convert_loc (loc, type, 11242 TREE_OPERAND (arg0, 0)), 11243 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1)); 11244 11245 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */ 11246 if (TREE_CODE (arg0) == BIT_AND_EXPR 11247 && INTEGRAL_TYPE_P (type) 11248 && integer_onep (TREE_OPERAND (arg0, 1)) 11249 && integer_onep (arg1)) 11250 return fold_build2_loc (loc, EQ_EXPR, type, arg0, 11251 build_zero_cst (TREE_TYPE (arg0))); 11252 11253 /* Fold (X & Y) ^ Y as ~X & Y. 
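At bit positions where Y is 0 both sides are 0; where Y is 1 the left side is X ^ 1, i.e. the complement of X, so the two expressions agree.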
*/ 11254 if (TREE_CODE (arg0) == BIT_AND_EXPR 11255 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11256 { 11257 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11258 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11259 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11260 fold_convert_loc (loc, type, arg1)); 11261 } 11262 /* Fold (X & Y) ^ X as ~Y & X. */ 11263 if (TREE_CODE (arg0) == BIT_AND_EXPR 11264 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11265 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11266 { 11267 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11268 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11269 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11270 fold_convert_loc (loc, type, arg1)); 11271 } 11272 /* Fold X ^ (X & Y) as X & ~Y. */ 11273 if (TREE_CODE (arg1) == BIT_AND_EXPR 11274 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11275 { 11276 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11277 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11278 fold_convert_loc (loc, type, arg0), 11279 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); 11280 } 11281 /* Fold X ^ (Y & X) as ~Y & X. */ 11282 if (TREE_CODE (arg1) == BIT_AND_EXPR 11283 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11284 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11285 { 11286 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11287 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11288 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11289 fold_convert_loc (loc, type, arg0)); 11290 } 11291 11292 /* See if this can be simplified into a rotate first. If that 11293 is unsuccessful continue in the association code. */ 11294 goto bit_rotate; 11295 11296 case BIT_AND_EXPR: 11297 /* ~X & X, (X == 0) & X, and !X & X are always zero. */ 11298 if ((TREE_CODE (arg0) == BIT_NOT_EXPR 11299 || TREE_CODE (arg0) == TRUTH_NOT_EXPR 11300 || (TREE_CODE (arg0) == EQ_EXPR 11301 && integer_zerop (TREE_OPERAND (arg0, 1)))) 11302 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11303 return omit_one_operand_loc (loc, type, integer_zero_node, arg1); 11304 11305 /* X & ~X , X & (X == 0), and X & !X are always zero. */ 11306 if ((TREE_CODE (arg1) == BIT_NOT_EXPR 11307 || TREE_CODE (arg1) == TRUTH_NOT_EXPR 11308 || (TREE_CODE (arg1) == EQ_EXPR 11309 && integer_zerop (TREE_OPERAND (arg1, 1)))) 11310 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11311 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 11312 11313 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */ 11314 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11315 && INTEGRAL_TYPE_P (type) 11316 && integer_onep (TREE_OPERAND (arg0, 1)) 11317 && integer_onep (arg1)) 11318 { 11319 tree tem2; 11320 tem = TREE_OPERAND (arg0, 0); 11321 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1); 11322 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), 11323 tem, tem2); 11324 return fold_build2_loc (loc, EQ_EXPR, type, tem2, 11325 build_zero_cst (TREE_TYPE (tem))); 11326 } 11327 /* Fold ~X & 1 as (X & 1) == 0. */ 11328 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11329 && INTEGRAL_TYPE_P (type) 11330 && integer_onep (arg1)) 11331 { 11332 tree tem2; 11333 tem = TREE_OPERAND (arg0, 0); 11334 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1); 11335 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), 11336 tem, tem2); 11337 return fold_build2_loc (loc, EQ_EXPR, type, tem2, 11338 build_zero_cst (TREE_TYPE (tem))); 11339 } 11340 /* Fold !X & 1 as X == 0. 
*/ 11341 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 11342 && integer_onep (arg1)) 11343 { 11344 tem = TREE_OPERAND (arg0, 0); 11345 return fold_build2_loc (loc, EQ_EXPR, type, tem, 11346 build_zero_cst (TREE_TYPE (tem))); 11347 } 11348 11349 /* Fold (X ^ Y) & Y as ~X & Y. */ 11350 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11351 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11352 { 11353 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11354 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11355 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11356 fold_convert_loc (loc, type, arg1)); 11357 } 11358 /* Fold (X ^ Y) & X as ~Y & X. */ 11359 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11360 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11361 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11362 { 11363 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11364 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11365 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11366 fold_convert_loc (loc, type, arg1)); 11367 } 11368 /* Fold X & (X ^ Y) as X & ~Y. */ 11369 if (TREE_CODE (arg1) == BIT_XOR_EXPR 11370 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11371 { 11372 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11373 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11374 fold_convert_loc (loc, type, arg0), 11375 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); 11376 } 11377 /* Fold X & (Y ^ X) as ~Y & X. */ 11378 if (TREE_CODE (arg1) == BIT_XOR_EXPR 11379 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11380 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11381 { 11382 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11383 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11384 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11385 fold_convert_loc (loc, type, arg0)); 11386 } 11387 11388 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant 11389 multiple of 1 << CST. */ 11390 if (TREE_CODE (arg1) == INTEGER_CST) 11391 { 11392 wide_int cst1 = arg1; 11393 wide_int ncst1 = -cst1; 11394 if ((cst1 & ncst1) == ncst1 11395 && multiple_of_p (type, arg0, 11396 wide_int_to_tree (TREE_TYPE (arg1), ncst1))) 11397 return fold_convert_loc (loc, type, arg0); 11398 } 11399 11400 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero 11401 bits from CST2. */ 11402 if (TREE_CODE (arg1) == INTEGER_CST 11403 && TREE_CODE (arg0) == MULT_EXPR 11404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 11405 { 11406 wide_int warg1 = arg1; 11407 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1)); 11408 11409 if (masked == 0) 11410 return omit_two_operands_loc (loc, type, build_zero_cst (type), 11411 arg0, arg1); 11412 else if (masked != warg1) 11413 { 11414 /* Avoid the transform if arg1 is a mask of some 11415 mode which allows further optimizations. */ 11416 int pop = wi::popcount (warg1); 11417 if (!(pop >= BITS_PER_UNIT 11418 && exact_log2 (pop) != -1 11419 && wi::mask (pop, false, warg1.get_precision ()) == warg1)) 11420 return fold_build2_loc (loc, code, type, op0, 11421 wide_int_to_tree (type, masked)); 11422 } 11423 } 11424 11425 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M, 11426 ((A & N) + B) & M -> (A + B) & M 11427 Similarly if (N & M) == 0, 11428 ((A | N) + B) & M -> (A + B) & M 11429 and for - instead of + (or unary - instead of +) 11430 and/or ^ instead of |. 11431 If B is constant and (B & M) == 0, fold into A & M. 
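For example, with M = 0xff, ((A & 0x1fff) + B) & 0xff becomes (A + B) & 0xff, and ((A | 0x300) + B) & 0xff simplifies the same way.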
*/ 11432 if (TREE_CODE (arg1) == INTEGER_CST) 11433 { 11434 wide_int cst1 = arg1; 11435 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0 11436 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 11437 && (TREE_CODE (arg0) == PLUS_EXPR 11438 || TREE_CODE (arg0) == MINUS_EXPR 11439 || TREE_CODE (arg0) == NEGATE_EXPR) 11440 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)) 11441 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE)) 11442 { 11443 tree pmop[2]; 11444 int which = 0; 11445 wide_int cst0; 11446 11447 /* Now we know that arg0 is (C + D) or (C - D) or 11448 -C and arg1 (M) is == (1LL << cst) - 1. 11449 Store C into PMOP[0] and D into PMOP[1]. */ 11450 pmop[0] = TREE_OPERAND (arg0, 0); 11451 pmop[1] = NULL; 11452 if (TREE_CODE (arg0) != NEGATE_EXPR) 11453 { 11454 pmop[1] = TREE_OPERAND (arg0, 1); 11455 which = 1; 11456 } 11457 11458 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1) 11459 which = -1; 11460 11461 for (; which >= 0; which--) 11462 switch (TREE_CODE (pmop[which])) 11463 { 11464 case BIT_AND_EXPR: 11465 case BIT_IOR_EXPR: 11466 case BIT_XOR_EXPR: 11467 if (TREE_CODE (TREE_OPERAND (pmop[which], 1)) 11468 != INTEGER_CST) 11469 break; 11470 cst0 = TREE_OPERAND (pmop[which], 1); 11471 cst0 &= cst1; 11472 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR) 11473 { 11474 if (cst0 != cst1) 11475 break; 11476 } 11477 else if (cst0 != 0) 11478 break; 11479 /* If C or D is of the form (A & N) where 11480 (N & M) == M, or of the form (A | N) or 11481 (A ^ N) where (N & M) == 0, replace it with A. */ 11482 pmop[which] = TREE_OPERAND (pmop[which], 0); 11483 break; 11484 case INTEGER_CST: 11485 /* If C or D is a N where (N & M) == 0, it can be 11486 omitted (assumed 0). */ 11487 if ((TREE_CODE (arg0) == PLUS_EXPR 11488 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0)) 11489 && (cst1 & pmop[which]) == 0) 11490 pmop[which] = NULL; 11491 break; 11492 default: 11493 break; 11494 } 11495 11496 /* Only build anything new if we optimized one or both arguments 11497 above. */ 11498 if (pmop[0] != TREE_OPERAND (arg0, 0) 11499 || (TREE_CODE (arg0) != NEGATE_EXPR 11500 && pmop[1] != TREE_OPERAND (arg0, 1))) 11501 { 11502 tree utype = TREE_TYPE (arg0); 11503 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))) 11504 { 11505 /* Perform the operations in a type that has defined 11506 overflow behavior. */ 11507 utype = unsigned_type_for (TREE_TYPE (arg0)); 11508 if (pmop[0] != NULL) 11509 pmop[0] = fold_convert_loc (loc, utype, pmop[0]); 11510 if (pmop[1] != NULL) 11511 pmop[1] = fold_convert_loc (loc, utype, pmop[1]); 11512 } 11513 11514 if (TREE_CODE (arg0) == NEGATE_EXPR) 11515 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]); 11516 else if (TREE_CODE (arg0) == PLUS_EXPR) 11517 { 11518 if (pmop[0] != NULL && pmop[1] != NULL) 11519 tem = fold_build2_loc (loc, PLUS_EXPR, utype, 11520 pmop[0], pmop[1]); 11521 else if (pmop[0] != NULL) 11522 tem = pmop[0]; 11523 else if (pmop[1] != NULL) 11524 tem = pmop[1]; 11525 else 11526 return build_int_cst (type, 0); 11527 } 11528 else if (pmop[0] == NULL) 11529 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]); 11530 else 11531 tem = fold_build2_loc (loc, MINUS_EXPR, utype, 11532 pmop[0], pmop[1]); 11533 /* TEM is now the new binary +, - or unary - replacement. 
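It still has to be masked with M (arg1) and converted back to the original type, which the code below does.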
*/ 11534 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem, 11535 fold_convert_loc (loc, utype, arg1)); 11536 return fold_convert_loc (loc, type, tem); 11537 } 11538 } 11539 } 11540 11541 t1 = distribute_bit_expr (loc, code, type, arg0, arg1); 11542 if (t1 != NULL_TREE) 11543 return t1; 11544 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */ 11545 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR 11546 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))) 11547 { 11548 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0))); 11549 11550 wide_int mask = wide_int::from (arg1, prec, UNSIGNED); 11551 if (mask == -1) 11552 return 11553 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11554 } 11555 11556 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))). 11557 11558 This results in more efficient code for machines without a NOR 11559 instruction. Combine will canonicalize to the first form 11560 which will allow use of NOR instructions provided by the 11561 backend if they exist. */ 11562 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11563 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11564 { 11565 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 11566 build2 (BIT_IOR_EXPR, type, 11567 fold_convert_loc (loc, type, 11568 TREE_OPERAND (arg0, 0)), 11569 fold_convert_loc (loc, type, 11570 TREE_OPERAND (arg1, 0)))); 11571 } 11572 11573 /* If arg0 is derived from the address of an object or function, we may 11574 be able to fold this expression using the object or function's 11575 alignment. */ 11576 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1)) 11577 { 11578 unsigned HOST_WIDE_INT modulus, residue; 11579 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1); 11580 11581 modulus = get_pointer_modulus_and_residue (arg0, &residue, 11582 integer_onep (arg1)); 11583 11584 /* This works because modulus is a power of 2. If this weren't the 11585 case, we'd have to replace it by its greatest power-of-2 11586 divisor: modulus & -modulus. */ 11587 if (low < modulus) 11588 return build_int_cst (type, residue & low); 11589 } 11590 11591 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1)) 11592 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1)) 11593 if the new mask might be further optimized. */ 11594 if ((TREE_CODE (arg0) == LSHIFT_EXPR 11595 || TREE_CODE (arg0) == RSHIFT_EXPR) 11596 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT 11597 && TREE_CODE (arg1) == INTEGER_CST 11598 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1)) 11599 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0 11600 && (tree_to_uhwi (TREE_OPERAND (arg0, 1)) 11601 < TYPE_PRECISION (TREE_TYPE (arg0)))) 11602 { 11603 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1)); 11604 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1); 11605 unsigned HOST_WIDE_INT newmask, zerobits = 0; 11606 tree shift_type = TREE_TYPE (arg0); 11607 11608 if (TREE_CODE (arg0) == LSHIFT_EXPR) 11609 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1); 11610 else if (TREE_CODE (arg0) == RSHIFT_EXPR 11611 && TYPE_PRECISION (TREE_TYPE (arg0)) 11612 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0)))) 11613 { 11614 prec = TYPE_PRECISION (TREE_TYPE (arg0)); 11615 tree arg00 = TREE_OPERAND (arg0, 0); 11616 /* See if more bits can be proven as zero because of 11617 zero extension. 
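For example, if the value being shifted was zero-extended from unsigned char, a right shift by 4 leaves at most bits 0..3 possibly nonzero, so many more mask bits can be discarded.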
*/ 11618 if (TREE_CODE (arg00) == NOP_EXPR 11619 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0)))) 11620 { 11621 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0)); 11622 if (TYPE_PRECISION (inner_type) 11623 == GET_MODE_PRECISION (TYPE_MODE (inner_type)) 11624 && TYPE_PRECISION (inner_type) < prec) 11625 { 11626 prec = TYPE_PRECISION (inner_type); 11627 /* See if we can shorten the right shift. */ 11628 if (shiftc < prec) 11629 shift_type = inner_type; 11630 /* Otherwise X >> C1 is all zeros, so we'll optimize 11631 it into (X, 0) later on by making sure zerobits 11632 is all ones. */ 11633 } 11634 } 11635 zerobits = ~(unsigned HOST_WIDE_INT) 0; 11636 if (shiftc < prec) 11637 { 11638 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc; 11639 zerobits <<= prec - shiftc; 11640 } 11641 /* For arithmetic shift if sign bit could be set, zerobits 11642 can contain actually sign bits, so no transformation is 11643 possible, unless MASK masks them all away. In that 11644 case the shift needs to be converted into logical shift. */ 11645 if (!TYPE_UNSIGNED (TREE_TYPE (arg0)) 11646 && prec == TYPE_PRECISION (TREE_TYPE (arg0))) 11647 { 11648 if ((mask & zerobits) == 0) 11649 shift_type = unsigned_type_for (TREE_TYPE (arg0)); 11650 else 11651 zerobits = 0; 11652 } 11653 } 11654 11655 /* ((X << 16) & 0xff00) is (X, 0). */ 11656 if ((mask & zerobits) == mask) 11657 return omit_one_operand_loc (loc, type, 11658 build_int_cst (type, 0), arg0); 11659 11660 newmask = mask | zerobits; 11661 if (newmask != mask && (newmask & (newmask + 1)) == 0) 11662 { 11663 /* Only do the transformation if NEWMASK is some integer 11664 mode's mask. */ 11665 for (prec = BITS_PER_UNIT; 11666 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1) 11667 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1) 11668 break; 11669 if (prec < HOST_BITS_PER_WIDE_INT 11670 || newmask == ~(unsigned HOST_WIDE_INT) 0) 11671 { 11672 tree newmaskt; 11673 11674 if (shift_type != TREE_TYPE (arg0)) 11675 { 11676 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type, 11677 fold_convert_loc (loc, shift_type, 11678 TREE_OPERAND (arg0, 0)), 11679 TREE_OPERAND (arg0, 1)); 11680 tem = fold_convert_loc (loc, type, tem); 11681 } 11682 else 11683 tem = op0; 11684 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask); 11685 if (!tree_int_cst_equal (newmaskt, arg1)) 11686 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt); 11687 } 11688 } 11689 } 11690 11691 goto associate; 11692 11693 case RDIV_EXPR: 11694 /* Don't touch a floating-point divide by zero unless the mode 11695 of the constant can represent infinity. */ 11696 if (TREE_CODE (arg1) == REAL_CST 11697 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))) 11698 && real_zerop (arg1)) 11699 return NULL_TREE; 11700 11701 /* (-A) / (-B) -> A / B */ 11702 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 11703 return fold_build2_loc (loc, RDIV_EXPR, type, 11704 TREE_OPERAND (arg0, 0), 11705 negate_expr (arg1)); 11706 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 11707 return fold_build2_loc (loc, RDIV_EXPR, type, 11708 negate_expr (arg0), 11709 TREE_OPERAND (arg1, 0)); 11710 11711 /* Convert A/B/C to A/(B*C). */ 11712 if (flag_reciprocal_math 11713 && TREE_CODE (arg0) == RDIV_EXPR) 11714 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0), 11715 fold_build2_loc (loc, MULT_EXPR, type, 11716 TREE_OPERAND (arg0, 1), arg1)); 11717 11718 /* Convert A/(B/C) to (A/B)*C. 
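For example, x / (y / z) becomes (x / y) * z, trading the inner division for a multiplication; rounding may differ, hence the flag_reciprocal_math guard below.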
*/ 11719 if (flag_reciprocal_math 11720 && TREE_CODE (arg1) == RDIV_EXPR) 11721 return fold_build2_loc (loc, MULT_EXPR, type, 11722 fold_build2_loc (loc, RDIV_EXPR, type, arg0, 11723 TREE_OPERAND (arg1, 0)), 11724 TREE_OPERAND (arg1, 1)); 11725 11726 /* Convert C1/(X*C2) into (C1/C2)/X. */ 11727 if (flag_reciprocal_math 11728 && TREE_CODE (arg1) == MULT_EXPR 11729 && TREE_CODE (arg0) == REAL_CST 11730 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 11731 { 11732 tree tem = const_binop (RDIV_EXPR, arg0, 11733 TREE_OPERAND (arg1, 1)); 11734 if (tem) 11735 return fold_build2_loc (loc, RDIV_EXPR, type, tem, 11736 TREE_OPERAND (arg1, 0)); 11737 } 11738 11739 if (flag_unsafe_math_optimizations) 11740 { 11741 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 11742 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 11743 11744 /* Optimize sin(x)/cos(x) as tan(x). */ 11745 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS) 11746 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF) 11747 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL)) 11748 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 11749 CALL_EXPR_ARG (arg1, 0), 0)) 11750 { 11751 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 11752 11753 if (tanfn != NULL_TREE) 11754 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0)); 11755 } 11756 11757 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */ 11758 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN) 11759 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF) 11760 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL)) 11761 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 11762 CALL_EXPR_ARG (arg1, 0), 0)) 11763 { 11764 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 11765 11766 if (tanfn != NULL_TREE) 11767 { 11768 tree tmp = build_call_expr_loc (loc, tanfn, 1, 11769 CALL_EXPR_ARG (arg0, 0)); 11770 return fold_build2_loc (loc, RDIV_EXPR, type, 11771 build_real (type, dconst1), tmp); 11772 } 11773 } 11774 11775 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about 11776 NaNs or Infinities. */ 11777 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN) 11778 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF) 11779 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL))) 11780 { 11781 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11782 tree arg01 = CALL_EXPR_ARG (arg1, 0); 11783 11784 if (! HONOR_NANS (arg00) 11785 && ! HONOR_INFINITIES (element_mode (arg00)) 11786 && operand_equal_p (arg00, arg01, 0)) 11787 { 11788 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 11789 11790 if (cosfn != NULL_TREE) 11791 return build_call_expr_loc (loc, cosfn, 1, arg00); 11792 } 11793 } 11794 11795 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about 11796 NaNs or Infinities. */ 11797 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN) 11798 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF) 11799 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL))) 11800 { 11801 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11802 tree arg01 = CALL_EXPR_ARG (arg1, 0); 11803 11804 if (! HONOR_NANS (arg00) 11805 && ! HONOR_INFINITIES (element_mode (arg00)) 11806 && operand_equal_p (arg00, arg01, 0)) 11807 { 11808 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 11809 11810 if (cosfn != NULL_TREE) 11811 { 11812 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00); 11813 return fold_build2_loc (loc, RDIV_EXPR, type, 11814 build_real (type, dconst1), 11815 tmp); 11816 } 11817 } 11818 } 11819 11820 /* Optimize pow(x,c)/x as pow(x,c-1). 
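For example, pow (x, 3.0) / x becomes pow (x, 2.0).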
*/ 11821 if (fcode0 == BUILT_IN_POW 11822 || fcode0 == BUILT_IN_POWF 11823 || fcode0 == BUILT_IN_POWL) 11824 { 11825 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11826 tree arg01 = CALL_EXPR_ARG (arg0, 1); 11827 if (TREE_CODE (arg01) == REAL_CST 11828 && !TREE_OVERFLOW (arg01) 11829 && operand_equal_p (arg1, arg00, 0)) 11830 { 11831 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 11832 REAL_VALUE_TYPE c; 11833 tree arg; 11834 11835 c = TREE_REAL_CST (arg01); 11836 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1); 11837 arg = build_real (type, c); 11838 return build_call_expr_loc (loc, powfn, 2, arg1, arg); 11839 } 11840 } 11841 11842 /* Optimize a/root(b/c) into a*root(c/b). */ 11843 if (BUILTIN_ROOT_P (fcode1)) 11844 { 11845 tree rootarg = CALL_EXPR_ARG (arg1, 0); 11846 11847 if (TREE_CODE (rootarg) == RDIV_EXPR) 11848 { 11849 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 11850 tree b = TREE_OPERAND (rootarg, 0); 11851 tree c = TREE_OPERAND (rootarg, 1); 11852 11853 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b); 11854 11855 tmp = build_call_expr_loc (loc, rootfn, 1, tmp); 11856 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp); 11857 } 11858 } 11859 11860 /* Optimize x/expN(y) into x*expN(-y). */ 11861 if (BUILTIN_EXPONENT_P (fcode1)) 11862 { 11863 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 11864 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0)); 11865 arg1 = build_call_expr_loc (loc, 11866 expfn, 1, 11867 fold_convert_loc (loc, type, arg)); 11868 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 11869 } 11870 11871 /* Optimize x/pow(y,z) into x*pow(y,-z). */ 11872 if (fcode1 == BUILT_IN_POW 11873 || fcode1 == BUILT_IN_POWF 11874 || fcode1 == BUILT_IN_POWL) 11875 { 11876 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 11877 tree arg10 = CALL_EXPR_ARG (arg1, 0); 11878 tree arg11 = CALL_EXPR_ARG (arg1, 1); 11879 tree neg11 = fold_convert_loc (loc, type, 11880 negate_expr (arg11)); 11881 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11); 11882 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 11883 } 11884 } 11885 return NULL_TREE; 11886 11887 case TRUNC_DIV_EXPR: 11888 /* Optimize (X & (-A)) / A where A is a power of 2, 11889 to X >> log2(A) */ 11890 if (TREE_CODE (arg0) == BIT_AND_EXPR 11891 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST 11892 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0) 11893 { 11894 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1), 11895 arg1, TREE_OPERAND (arg0, 1)); 11896 if (sum && integer_zerop (sum)) { 11897 tree pow2 = build_int_cst (integer_type_node, 11898 wi::exact_log2 (arg1)); 11899 return fold_build2_loc (loc, RSHIFT_EXPR, type, 11900 TREE_OPERAND (arg0, 0), pow2); 11901 } 11902 } 11903 11904 /* Fall through */ 11905 11906 case FLOOR_DIV_EXPR: 11907 /* Simplify A / (B << N) where A and B are positive and B is 11908 a power of 2, to A >> (N + log2(B)). 
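For example, A / (4 << N) becomes A >> (N + 2).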
*/ 11909 strict_overflow_p = false; 11910 if (TREE_CODE (arg1) == LSHIFT_EXPR 11911 && (TYPE_UNSIGNED (type) 11912 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p))) 11913 { 11914 tree sval = TREE_OPERAND (arg1, 0); 11915 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0) 11916 { 11917 tree sh_cnt = TREE_OPERAND (arg1, 1); 11918 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt), 11919 wi::exact_log2 (sval)); 11920 11921 if (strict_overflow_p) 11922 fold_overflow_warning (("assuming signed overflow does not " 11923 "occur when simplifying A / (B << N)"), 11924 WARN_STRICT_OVERFLOW_MISC); 11925 11926 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt), 11927 sh_cnt, pow2); 11928 return fold_build2_loc (loc, RSHIFT_EXPR, type, 11929 fold_convert_loc (loc, type, arg0), sh_cnt); 11930 } 11931 } 11932 11933 /* Fall through */ 11934 11935 case ROUND_DIV_EXPR: 11936 case CEIL_DIV_EXPR: 11937 case EXACT_DIV_EXPR: 11938 if (integer_zerop (arg1)) 11939 return NULL_TREE; 11940 11941 /* Convert -A / -B to A / B when the type is signed and overflow is 11942 undefined. */ 11943 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 11944 && TREE_CODE (arg0) == NEGATE_EXPR 11945 && negate_expr_p (arg1)) 11946 { 11947 if (INTEGRAL_TYPE_P (type)) 11948 fold_overflow_warning (("assuming signed overflow does not occur " 11949 "when distributing negation across " 11950 "division"), 11951 WARN_STRICT_OVERFLOW_MISC); 11952 return fold_build2_loc (loc, code, type, 11953 fold_convert_loc (loc, type, 11954 TREE_OPERAND (arg0, 0)), 11955 fold_convert_loc (loc, type, 11956 negate_expr (arg1))); 11957 } 11958 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 11959 && TREE_CODE (arg1) == NEGATE_EXPR 11960 && negate_expr_p (arg0)) 11961 { 11962 if (INTEGRAL_TYPE_P (type)) 11963 fold_overflow_warning (("assuming signed overflow does not occur " 11964 "when distributing negation across " 11965 "division"), 11966 WARN_STRICT_OVERFLOW_MISC); 11967 return fold_build2_loc (loc, code, type, 11968 fold_convert_loc (loc, type, 11969 negate_expr (arg0)), 11970 fold_convert_loc (loc, type, 11971 TREE_OPERAND (arg1, 0))); 11972 } 11973 11974 /* If arg0 is a multiple of arg1, then rewrite to the fastest div 11975 operation, EXACT_DIV_EXPR. 11976 11977 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now. 11978 At one time others generated faster code, it's not clear if they do 11979 after the last round to changes to the DIV code in expmed.c. */ 11980 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR) 11981 && multiple_of_p (type, arg0, arg1)) 11982 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1); 11983 11984 strict_overflow_p = false; 11985 if (TREE_CODE (arg1) == INTEGER_CST 11986 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 11987 &strict_overflow_p))) 11988 { 11989 if (strict_overflow_p) 11990 fold_overflow_warning (("assuming signed overflow does not occur " 11991 "when simplifying division"), 11992 WARN_STRICT_OVERFLOW_MISC); 11993 return fold_convert_loc (loc, type, tem); 11994 } 11995 11996 return NULL_TREE; 11997 11998 case CEIL_MOD_EXPR: 11999 case FLOOR_MOD_EXPR: 12000 case ROUND_MOD_EXPR: 12001 case TRUNC_MOD_EXPR: 12002 /* X % -Y is the same as X % Y. 
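With truncating division the remainder takes the sign of the dividend, e.g. 7 % -3 == 7 % 3 == 1 and -7 % -3 == -7 % 3 == -1.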
*/ 12003 if (code == TRUNC_MOD_EXPR 12004 && !TYPE_UNSIGNED (type) 12005 && TREE_CODE (arg1) == NEGATE_EXPR 12006 && !TYPE_OVERFLOW_TRAPS (type)) 12007 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0), 12008 fold_convert_loc (loc, type, 12009 TREE_OPERAND (arg1, 0))); 12010 12011 strict_overflow_p = false; 12012 if (TREE_CODE (arg1) == INTEGER_CST 12013 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 12014 &strict_overflow_p))) 12015 { 12016 if (strict_overflow_p) 12017 fold_overflow_warning (("assuming signed overflow does not occur " 12018 "when simplifying modulus"), 12019 WARN_STRICT_OVERFLOW_MISC); 12020 return fold_convert_loc (loc, type, tem); 12021 } 12022 12023 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, 12024 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */ 12025 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR) 12026 && (TYPE_UNSIGNED (type) 12027 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p))) 12028 { 12029 tree c = arg1; 12030 /* Also optimize A % (C << N) where C is a power of 2, 12031 to A & ((C << N) - 1). */ 12032 if (TREE_CODE (arg1) == LSHIFT_EXPR) 12033 c = TREE_OPERAND (arg1, 0); 12034 12035 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) 12036 { 12037 tree mask 12038 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1, 12039 build_int_cst (TREE_TYPE (arg1), 1)); 12040 if (strict_overflow_p) 12041 fold_overflow_warning (("assuming signed overflow does not " 12042 "occur when simplifying " 12043 "X % (power of two)"), 12044 WARN_STRICT_OVERFLOW_MISC); 12045 return fold_build2_loc (loc, BIT_AND_EXPR, type, 12046 fold_convert_loc (loc, type, arg0), 12047 fold_convert_loc (loc, type, mask)); 12048 } 12049 } 12050 12051 return NULL_TREE; 12052 12053 case LROTATE_EXPR: 12054 case RROTATE_EXPR: 12055 case RSHIFT_EXPR: 12056 case LSHIFT_EXPR: 12057 /* Since negative shift count is not well-defined, 12058 don't try to compute it in the compiler. */ 12059 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0) 12060 return NULL_TREE; 12061 12062 prec = element_precision (type); 12063 12064 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */ 12065 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1) 12066 && tree_to_uhwi (arg1) < prec 12067 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1)) 12068 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec) 12069 { 12070 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1)) 12071 + tree_to_uhwi (arg1)); 12072 12073 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2 12074 being well defined. */ 12075 if (low >= prec) 12076 { 12077 if (code == LROTATE_EXPR || code == RROTATE_EXPR) 12078 low = low % prec; 12079 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR) 12080 return omit_one_operand_loc (loc, type, build_zero_cst (type), 12081 TREE_OPERAND (arg0, 0)); 12082 else 12083 low = prec - 1; 12084 } 12085 12086 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12087 build_int_cst (TREE_TYPE (arg1), low)); 12088 } 12089 12090 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c 12091 into x & ((unsigned)-1 >> c) for unsigned types. 
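For example, on a 32-bit unsigned type (x >> 4) << 4 becomes x & 0xfffffff0 and (x << 4) >> 4 becomes x & 0x0fffffff.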
*/ 12092 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR) 12093 || (TYPE_UNSIGNED (type) 12094 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR)) 12095 && tree_fits_uhwi_p (arg1) 12096 && tree_to_uhwi (arg1) < prec 12097 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1)) 12098 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec) 12099 { 12100 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1)); 12101 HOST_WIDE_INT low1 = tree_to_uhwi (arg1); 12102 tree lshift; 12103 tree arg00; 12104 12105 if (low0 == low1) 12106 { 12107 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 12108 12109 lshift = build_minus_one_cst (type); 12110 lshift = const_binop (code, lshift, arg1); 12111 12112 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift); 12113 } 12114 } 12115 12116 /* If we have a rotate of a bit operation with the rotate count and 12117 the second operand of the bit operation both constant, 12118 permute the two operations. */ 12119 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 12120 && (TREE_CODE (arg0) == BIT_AND_EXPR 12121 || TREE_CODE (arg0) == BIT_IOR_EXPR 12122 || TREE_CODE (arg0) == BIT_XOR_EXPR) 12123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12124 return fold_build2_loc (loc, TREE_CODE (arg0), type, 12125 fold_build2_loc (loc, code, type, 12126 TREE_OPERAND (arg0, 0), arg1), 12127 fold_build2_loc (loc, code, type, 12128 TREE_OPERAND (arg0, 1), arg1)); 12129 12130 /* Two consecutive rotates adding up to the some integer 12131 multiple of the precision of the type can be ignored. */ 12132 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 12133 && TREE_CODE (arg0) == RROTATE_EXPR 12134 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 12135 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)), 12136 prec) == 0) 12137 return TREE_OPERAND (arg0, 0); 12138 12139 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1) 12140 (X & C2) >> C1 into (X >> C1) & (C2 >> C1) 12141 if the latter can be further optimized. */ 12142 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR) 12143 && TREE_CODE (arg0) == BIT_AND_EXPR 12144 && TREE_CODE (arg1) == INTEGER_CST 12145 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12146 { 12147 tree mask = fold_build2_loc (loc, code, type, 12148 fold_convert_loc (loc, type, 12149 TREE_OPERAND (arg0, 1)), 12150 arg1); 12151 tree shift = fold_build2_loc (loc, code, type, 12152 fold_convert_loc (loc, type, 12153 TREE_OPERAND (arg0, 0)), 12154 arg1); 12155 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask); 12156 if (tem) 12157 return tem; 12158 } 12159 12160 return NULL_TREE; 12161 12162 case MIN_EXPR: 12163 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1); 12164 if (tem) 12165 return tem; 12166 goto associate; 12167 12168 case MAX_EXPR: 12169 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1); 12170 if (tem) 12171 return tem; 12172 goto associate; 12173 12174 case TRUTH_ANDIF_EXPR: 12175 /* Note that the operands of this must be ints 12176 and their values must be 0 or 1. 12177 ("true" is a fixed value perhaps depending on the language.) */ 12178 /* If first arg is constant zero, return it. */ 12179 if (integer_zerop (arg0)) 12180 return fold_convert_loc (loc, type, arg0); 12181 case TRUTH_AND_EXPR: 12182 /* If either arg is constant true, drop it. */ 12183 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12184 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 12185 if (TREE_CODE (arg1) == INTEGER_CST && ! 
integer_zerop (arg1) 12186 /* Preserve sequence points. */ 12187 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) 12188 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12189 /* If second arg is constant zero, result is zero, but first arg 12190 must be evaluated. */ 12191 if (integer_zerop (arg1)) 12192 return omit_one_operand_loc (loc, type, arg1, arg0); 12193 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR 12194 case will be handled here. */ 12195 if (integer_zerop (arg0)) 12196 return omit_one_operand_loc (loc, type, arg0, arg1); 12197 12198 /* !X && X is always false. */ 12199 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12200 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12201 return omit_one_operand_loc (loc, type, integer_zero_node, arg1); 12202 /* X && !X is always false. */ 12203 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12204 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12205 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12206 12207 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y 12208 means A >= Y && A != MAX, but in this case we know that 12209 A < X <= MAX. */ 12210 12211 if (!TREE_SIDE_EFFECTS (arg0) 12212 && !TREE_SIDE_EFFECTS (arg1)) 12213 { 12214 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1); 12215 if (tem && !operand_equal_p (tem, arg0, 0)) 12216 return fold_build2_loc (loc, code, type, tem, arg1); 12217 12218 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0); 12219 if (tem && !operand_equal_p (tem, arg1, 0)) 12220 return fold_build2_loc (loc, code, type, arg0, tem); 12221 } 12222 12223 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1)) 12224 != NULL_TREE) 12225 return tem; 12226 12227 return NULL_TREE; 12228 12229 case TRUTH_ORIF_EXPR: 12230 /* Note that the operands of this must be ints 12231 and their values must be 0 or true. 12232 ("true" is a fixed value perhaps depending on the language.) */ 12233 /* If first arg is constant true, return it. */ 12234 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12235 return fold_convert_loc (loc, type, arg0); 12236 case TRUTH_OR_EXPR: 12237 /* If either arg is constant zero, drop it. */ 12238 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0)) 12239 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 12240 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1) 12241 /* Preserve sequence points. */ 12242 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) 12243 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12244 /* If second arg is constant true, result is true, but we must 12245 evaluate first arg. */ 12246 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)) 12247 return omit_one_operand_loc (loc, type, arg1, arg0); 12248 /* Likewise for first arg, but note this only occurs here for 12249 TRUTH_OR_EXPR. */ 12250 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12251 return omit_one_operand_loc (loc, type, arg0, arg1); 12252 12253 /* !X || X is always true. */ 12254 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12256 return omit_one_operand_loc (loc, type, integer_one_node, arg1); 12257 /* X || !X is always true. 
*/ 12258 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12259 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12260 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 12261 12262 /* (X && !Y) || (!X && Y) is X ^ Y */ 12263 if (TREE_CODE (arg0) == TRUTH_AND_EXPR 12264 && TREE_CODE (arg1) == TRUTH_AND_EXPR) 12265 { 12266 tree a0, a1, l0, l1, n0, n1; 12267 12268 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 12269 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 12270 12271 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 12272 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 12273 12274 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0); 12275 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1); 12276 12277 if ((operand_equal_p (n0, a0, 0) 12278 && operand_equal_p (n1, a1, 0)) 12279 || (operand_equal_p (n0, a1, 0) 12280 && operand_equal_p (n1, a0, 0))) 12281 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1); 12282 } 12283 12284 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1)) 12285 != NULL_TREE) 12286 return tem; 12287 12288 return NULL_TREE; 12289 12290 case TRUTH_XOR_EXPR: 12291 /* If the second arg is constant zero, drop it. */ 12292 if (integer_zerop (arg1)) 12293 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12294 /* If the second arg is constant true, this is a logical inversion. */ 12295 if (integer_onep (arg1)) 12296 { 12297 tem = invert_truthvalue_loc (loc, arg0); 12298 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 12299 } 12300 /* Identical arguments cancel to zero. */ 12301 if (operand_equal_p (arg0, arg1, 0)) 12302 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12303 12304 /* !X ^ X is always true. */ 12305 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12306 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12307 return omit_one_operand_loc (loc, type, integer_one_node, arg1); 12308 12309 /* X ^ !X is always true. */ 12310 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12311 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12312 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 12313 12314 return NULL_TREE; 12315 12316 case EQ_EXPR: 12317 case NE_EXPR: 12318 STRIP_NOPS (arg0); 12319 STRIP_NOPS (arg1); 12320 12321 tem = fold_comparison (loc, code, type, op0, op1); 12322 if (tem != NULL_TREE) 12323 return tem; 12324 12325 /* bool_var != 0 becomes bool_var. */ 12326 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 12327 && code == NE_EXPR) 12328 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12329 12330 /* bool_var == 1 becomes bool_var. */ 12331 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 12332 && code == EQ_EXPR) 12333 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12334 12335 /* bool_var != 1 becomes !bool_var. */ 12336 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 12337 && code == NE_EXPR) 12338 return fold_convert_loc (loc, type, 12339 fold_build1_loc (loc, TRUTH_NOT_EXPR, 12340 TREE_TYPE (arg0), arg0)); 12341 12342 /* bool_var == 0 becomes !bool_var. 
*/ 12343 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 12344 && code == EQ_EXPR) 12345 return fold_convert_loc (loc, type, 12346 fold_build1_loc (loc, TRUTH_NOT_EXPR, 12347 TREE_TYPE (arg0), arg0)); 12348 12349 /* !exp != 0 becomes !exp */ 12350 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1) 12351 && code == NE_EXPR) 12352 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12353 12354 /* If this is an equality comparison of the address of two non-weak, 12355 unaliased symbols neither of which are extern (since we do not 12356 have access to attributes for externs), then we know the result. */ 12357 if (TREE_CODE (arg0) == ADDR_EXPR 12358 && DECL_P (TREE_OPERAND (arg0, 0)) 12359 && TREE_CODE (arg1) == ADDR_EXPR 12360 && DECL_P (TREE_OPERAND (arg1, 0))) 12361 { 12362 int equal; 12363 12364 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0)) 12365 && decl_in_symtab_p (TREE_OPERAND (arg1, 0))) 12366 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0)) 12367 ->equal_address_to (symtab_node::get_create 12368 (TREE_OPERAND (arg1, 0))); 12369 else 12370 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0); 12371 if (equal != 2) 12372 return constant_boolean_node (equal 12373 ? code == EQ_EXPR : code != EQ_EXPR, 12374 type); 12375 } 12376 12377 /* Similarly for a NEGATE_EXPR. */ 12378 if (TREE_CODE (arg0) == NEGATE_EXPR 12379 && TREE_CODE (arg1) == INTEGER_CST 12380 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0), 12381 arg1))) 12382 && TREE_CODE (tem) == INTEGER_CST 12383 && !TREE_OVERFLOW (tem)) 12384 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 12385 12386 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */ 12387 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12388 && TREE_CODE (arg1) == INTEGER_CST 12389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12390 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12391 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0), 12392 fold_convert_loc (loc, 12393 TREE_TYPE (arg0), 12394 arg1), 12395 TREE_OPERAND (arg0, 1))); 12396 12397 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */ 12398 if ((TREE_CODE (arg0) == PLUS_EXPR 12399 || TREE_CODE (arg0) == POINTER_PLUS_EXPR 12400 || TREE_CODE (arg0) == MINUS_EXPR) 12401 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0, 12402 0)), 12403 arg1, 0) 12404 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 12405 || POINTER_TYPE_P (TREE_TYPE (arg0)))) 12406 { 12407 tree val = TREE_OPERAND (arg0, 1); 12408 val = fold_build2_loc (loc, code, type, val, 12409 build_int_cst (TREE_TYPE (val), 0)); 12410 return omit_two_operands_loc (loc, type, val, 12411 TREE_OPERAND (arg0, 0), arg1); 12412 } 12413 12414 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */ 12415 if ((TREE_CODE (arg1) == PLUS_EXPR 12416 || TREE_CODE (arg1) == POINTER_PLUS_EXPR 12417 || TREE_CODE (arg1) == MINUS_EXPR) 12418 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1, 12419 0)), 12420 arg0, 0) 12421 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 12422 || POINTER_TYPE_P (TREE_TYPE (arg1)))) 12423 { 12424 tree val = TREE_OPERAND (arg1, 1); 12425 val = fold_build2_loc (loc, code, type, val, 12426 build_int_cst (TREE_TYPE (val), 0)); 12427 return omit_two_operands_loc (loc, type, val, 12428 TREE_OPERAND (arg1, 0), arg0); 12429 } 12430 12431 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. 
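When C is odd, C - X == X would require 2*X == C, which no value of X can satisfy (2*X is always even, even modulo a power of two), so the comparison folds to a constant: true for NE_EXPR and false for EQ_EXPR, while the operands are still evaluated for side effects via omit_two_operands_loc.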
*/ 12432 if (TREE_CODE (arg0) == MINUS_EXPR 12433 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST 12434 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0, 12435 1)), 12436 arg1, 0) 12437 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1) 12438 return omit_two_operands_loc (loc, type, 12439 code == NE_EXPR 12440 ? boolean_true_node : boolean_false_node, 12441 TREE_OPERAND (arg0, 1), arg1); 12442 12443 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */ 12444 if (TREE_CODE (arg1) == MINUS_EXPR 12445 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST 12446 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1, 12447 1)), 12448 arg0, 0) 12449 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1) 12450 return omit_two_operands_loc (loc, type, 12451 code == NE_EXPR 12452 ? boolean_true_node : boolean_false_node, 12453 TREE_OPERAND (arg1, 1), arg0); 12454 12455 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */ 12456 if (TREE_CODE (arg0) == ABS_EXPR 12457 && (integer_zerop (arg1) || real_zerop (arg1))) 12458 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1); 12459 12460 /* If this is an EQ or NE comparison with zero and ARG0 is 12461 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require 12462 two operations, but the latter can be done in one less insn 12463 on machines that have only two-operand insns or on which a 12464 constant cannot be the first operand. */ 12465 if (TREE_CODE (arg0) == BIT_AND_EXPR 12466 && integer_zerop (arg1)) 12467 { 12468 tree arg00 = TREE_OPERAND (arg0, 0); 12469 tree arg01 = TREE_OPERAND (arg0, 1); 12470 if (TREE_CODE (arg00) == LSHIFT_EXPR 12471 && integer_onep (TREE_OPERAND (arg00, 0))) 12472 { 12473 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00), 12474 arg01, TREE_OPERAND (arg00, 1)); 12475 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, 12476 build_int_cst (TREE_TYPE (arg0), 1)); 12477 return fold_build2_loc (loc, code, type, 12478 fold_convert_loc (loc, TREE_TYPE (arg1), tem), 12479 arg1); 12480 } 12481 else if (TREE_CODE (arg01) == LSHIFT_EXPR 12482 && integer_onep (TREE_OPERAND (arg01, 0))) 12483 { 12484 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01), 12485 arg00, TREE_OPERAND (arg01, 1)); 12486 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, 12487 build_int_cst (TREE_TYPE (arg0), 1)); 12488 return fold_build2_loc (loc, code, type, 12489 fold_convert_loc (loc, TREE_TYPE (arg1), tem), 12490 arg1); 12491 } 12492 } 12493 12494 /* If this is an NE or EQ comparison of zero against the result of a 12495 signed MOD operation whose second operand is a power of 2, make 12496 the MOD operation unsigned since it is simpler and equivalent. 
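Whether the remainder is zero depends only on whether the divisor divides the dividend, and for a power-of-two divisor that is a test of the low-order bits, which are the same under the signed and the unsigned interpretation; e.g. X % 4 == 0 exactly when (unsigned) X % 4 == 0.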
*/ 12497 if (integer_zerop (arg1) 12498 && !TYPE_UNSIGNED (TREE_TYPE (arg0)) 12499 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR 12500 || TREE_CODE (arg0) == CEIL_MOD_EXPR 12501 || TREE_CODE (arg0) == FLOOR_MOD_EXPR 12502 || TREE_CODE (arg0) == ROUND_MOD_EXPR) 12503 && integer_pow2p (TREE_OPERAND (arg0, 1))) 12504 { 12505 tree newtype = unsigned_type_for (TREE_TYPE (arg0)); 12506 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype, 12507 fold_convert_loc (loc, newtype, 12508 TREE_OPERAND (arg0, 0)), 12509 fold_convert_loc (loc, newtype, 12510 TREE_OPERAND (arg0, 1))); 12511 12512 return fold_build2_loc (loc, code, type, newmod, 12513 fold_convert_loc (loc, newtype, arg1)); 12514 } 12515 12516 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where 12517 C1 is a valid shift constant, and C2 is a power of two, i.e. 12518 a single bit. */ 12519 if (TREE_CODE (arg0) == BIT_AND_EXPR 12520 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR 12521 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)) 12522 == INTEGER_CST 12523 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12524 && integer_zerop (arg1)) 12525 { 12526 tree itype = TREE_TYPE (arg0); 12527 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1); 12528 prec = TYPE_PRECISION (itype); 12529 12530 /* Check for a valid shift count. */ 12531 if (wi::ltu_p (arg001, prec)) 12532 { 12533 tree arg01 = TREE_OPERAND (arg0, 1); 12534 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 12535 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01); 12536 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0 12537 can be rewritten as (X & (C2 << C1)) != 0. */ 12538 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec) 12539 { 12540 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001); 12541 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem); 12542 return fold_build2_loc (loc, code, type, tem, 12543 fold_convert_loc (loc, itype, arg1)); 12544 } 12545 /* Otherwise, for signed (arithmetic) shifts, 12546 ((X >> C1) & C2) != 0 is rewritten as X < 0, and 12547 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */ 12548 else if (!TYPE_UNSIGNED (itype)) 12549 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type, 12550 arg000, build_int_cst (itype, 0)); 12551 /* Otherwise, of unsigned (logical) shifts, 12552 ((X >> C1) & C2) != 0 is rewritten as (X,false), and 12553 ((X >> C1) & C2) == 0 is rewritten as (X,true). */ 12554 else 12555 return omit_one_operand_loc (loc, type, 12556 code == EQ_EXPR ? integer_one_node 12557 : integer_zero_node, 12558 arg000); 12559 } 12560 } 12561 12562 /* If we have (A & C) == C where C is a power of 2, convert this into 12563 (A & C) != 0. Similarly for NE_EXPR. */ 12564 if (TREE_CODE (arg0) == BIT_AND_EXPR 12565 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12566 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 12567 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 12568 arg0, fold_convert_loc (loc, TREE_TYPE (arg0), 12569 integer_zero_node)); 12570 12571 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign 12572 bit, then fold the expression into A < 0 or A >= 0. */ 12573 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type); 12574 if (tem) 12575 return tem; 12576 12577 /* If we have (A & C) == D where D & ~C != 0, convert this into 0. 12578 Similarly for NE_EXPR. 
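A & C can only have bits set that are also set in C, so it can never equal a D that has a bit set outside C; the EQ_EXPR form therefore folds to 0 and the NE_EXPR form to 1, with A & C still evaluated for any side effects.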
*/ 12579 if (TREE_CODE (arg0) == BIT_AND_EXPR 12580 && TREE_CODE (arg1) == INTEGER_CST 12581 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12582 { 12583 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR, 12584 TREE_TYPE (TREE_OPERAND (arg0, 1)), 12585 TREE_OPERAND (arg0, 1)); 12586 tree dandnotc 12587 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), 12588 fold_convert_loc (loc, TREE_TYPE (arg0), arg1), 12589 notc); 12590 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; 12591 if (integer_nonzerop (dandnotc)) 12592 return omit_one_operand_loc (loc, type, rslt, arg0); 12593 } 12594 12595 /* If we have (A | C) == D where C & ~D != 0, convert this into 0. 12596 Similarly for NE_EXPR. */ 12597 if (TREE_CODE (arg0) == BIT_IOR_EXPR 12598 && TREE_CODE (arg1) == INTEGER_CST 12599 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12600 { 12601 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1); 12602 tree candnotd 12603 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), 12604 TREE_OPERAND (arg0, 1), 12605 fold_convert_loc (loc, TREE_TYPE (arg0), notd)); 12606 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; 12607 if (integer_nonzerop (candnotd)) 12608 return omit_one_operand_loc (loc, type, rslt, arg0); 12609 } 12610 12611 /* If this is a comparison of a field, we may be able to simplify it. */ 12612 if ((TREE_CODE (arg0) == COMPONENT_REF 12613 || TREE_CODE (arg0) == BIT_FIELD_REF) 12614 /* Handle the constant case even without -O 12615 to make sure the warnings are given. */ 12616 && (optimize || TREE_CODE (arg1) == INTEGER_CST)) 12617 { 12618 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1); 12619 if (t1) 12620 return t1; 12621 } 12622 12623 /* Optimize comparisons of strlen vs zero to a compare of the 12624 first character of the string vs zero. To wit, 12625 strlen(ptr) == 0 => *ptr == 0 12626 strlen(ptr) != 0 => *ptr != 0 12627 Other cases should reduce to one of these two (or a constant) 12628 due to the return value of strlen being unsigned. */ 12629 if (TREE_CODE (arg0) == CALL_EXPR 12630 && integer_zerop (arg1)) 12631 { 12632 tree fndecl = get_callee_fndecl (arg0); 12633 12634 if (fndecl 12635 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 12636 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN 12637 && call_expr_nargs (arg0) == 1 12638 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE) 12639 { 12640 tree iref = build_fold_indirect_ref_loc (loc, 12641 CALL_EXPR_ARG (arg0, 0)); 12642 return fold_build2_loc (loc, code, type, iref, 12643 build_int_cst (TREE_TYPE (iref), 0)); 12644 } 12645 } 12646 12647 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width 12648 of X. Similarly fold (X >> C) == 0 into X >= 0. */ 12649 if (TREE_CODE (arg0) == RSHIFT_EXPR 12650 && integer_zerop (arg1) 12651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12652 { 12653 tree arg00 = TREE_OPERAND (arg0, 0); 12654 tree arg01 = TREE_OPERAND (arg0, 1); 12655 tree itype = TREE_TYPE (arg00); 12656 if (wi::eq_p (arg01, element_precision (itype) - 1)) 12657 { 12658 if (TYPE_UNSIGNED (itype)) 12659 { 12660 itype = signed_type_for (itype); 12661 arg00 = fold_convert_loc (loc, itype, arg00); 12662 } 12663 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, 12664 type, arg00, build_zero_cst (itype)); 12665 } 12666 } 12667 12668 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. 
*/ 12669 if (integer_zerop (arg1) 12670 && TREE_CODE (arg0) == BIT_XOR_EXPR) 12671 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12672 TREE_OPERAND (arg0, 1)); 12673 12674 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */ 12675 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12676 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 12677 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12678 build_zero_cst (TREE_TYPE (arg0))); 12679 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */ 12680 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 12682 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 12683 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1), 12684 build_zero_cst (TREE_TYPE (arg0))); 12685 12686 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */ 12687 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12688 && TREE_CODE (arg1) == INTEGER_CST 12689 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12690 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12691 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1), 12692 TREE_OPERAND (arg0, 1), arg1)); 12693 12694 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into 12695 (X & C) == 0 when C is a single bit. */ 12696 if (TREE_CODE (arg0) == BIT_AND_EXPR 12697 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR 12698 && integer_zerop (arg1) 12699 && integer_pow2p (TREE_OPERAND (arg0, 1))) 12700 { 12701 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), 12702 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0), 12703 TREE_OPERAND (arg0, 1)); 12704 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, 12705 type, tem, 12706 fold_convert_loc (loc, TREE_TYPE (arg0), 12707 arg1)); 12708 } 12709 12710 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the 12711 constant C is a power of two, i.e. a single bit. */ 12712 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12713 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 12714 && integer_zerop (arg1) 12715 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12716 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 12717 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) 12718 { 12719 tree arg00 = TREE_OPERAND (arg0, 0); 12720 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 12721 arg00, build_int_cst (TREE_TYPE (arg00), 0)); 12722 } 12723 12724 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0, 12725 when is C is a power of two, i.e. a single bit. */ 12726 if (TREE_CODE (arg0) == BIT_AND_EXPR 12727 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR 12728 && integer_zerop (arg1) 12729 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12730 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 12731 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) 12732 { 12733 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 12734 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000), 12735 arg000, TREE_OPERAND (arg0, 1)); 12736 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 12737 tem, build_int_cst (TREE_TYPE (tem), 0)); 12738 } 12739 12740 if (integer_zerop (arg1) 12741 && tree_expr_nonzero_p (arg0)) 12742 { 12743 tree res = constant_boolean_node (code==NE_EXPR, type); 12744 return omit_one_operand_loc (loc, type, res, arg0); 12745 } 12746 12747 /* Fold -X op -Y as X op Y, where op is eq/ne. 
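This is safe for equality comparisons regardless of overflow behaviour, since negating both operands cannot change whether they compare equal.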
*/ 12748 if (TREE_CODE (arg0) == NEGATE_EXPR 12749 && TREE_CODE (arg1) == NEGATE_EXPR) 12750 return fold_build2_loc (loc, code, type, 12751 TREE_OPERAND (arg0, 0), 12752 fold_convert_loc (loc, TREE_TYPE (arg0), 12753 TREE_OPERAND (arg1, 0))); 12754 12755 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */ 12756 if (TREE_CODE (arg0) == BIT_AND_EXPR 12757 && TREE_CODE (arg1) == BIT_AND_EXPR) 12758 { 12759 tree arg00 = TREE_OPERAND (arg0, 0); 12760 tree arg01 = TREE_OPERAND (arg0, 1); 12761 tree arg10 = TREE_OPERAND (arg1, 0); 12762 tree arg11 = TREE_OPERAND (arg1, 1); 12763 tree itype = TREE_TYPE (arg0); 12764 12765 if (operand_equal_p (arg01, arg11, 0)) 12766 return fold_build2_loc (loc, code, type, 12767 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12768 fold_build2_loc (loc, 12769 BIT_XOR_EXPR, itype, 12770 arg00, arg10), 12771 arg01), 12772 build_zero_cst (itype)); 12773 12774 if (operand_equal_p (arg01, arg10, 0)) 12775 return fold_build2_loc (loc, code, type, 12776 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12777 fold_build2_loc (loc, 12778 BIT_XOR_EXPR, itype, 12779 arg00, arg11), 12780 arg01), 12781 build_zero_cst (itype)); 12782 12783 if (operand_equal_p (arg00, arg11, 0)) 12784 return fold_build2_loc (loc, code, type, 12785 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12786 fold_build2_loc (loc, 12787 BIT_XOR_EXPR, itype, 12788 arg01, arg10), 12789 arg00), 12790 build_zero_cst (itype)); 12791 12792 if (operand_equal_p (arg00, arg10, 0)) 12793 return fold_build2_loc (loc, code, type, 12794 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12795 fold_build2_loc (loc, 12796 BIT_XOR_EXPR, itype, 12797 arg01, arg11), 12798 arg00), 12799 build_zero_cst (itype)); 12800 } 12801 12802 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12803 && TREE_CODE (arg1) == BIT_XOR_EXPR) 12804 { 12805 tree arg00 = TREE_OPERAND (arg0, 0); 12806 tree arg01 = TREE_OPERAND (arg0, 1); 12807 tree arg10 = TREE_OPERAND (arg1, 0); 12808 tree arg11 = TREE_OPERAND (arg1, 1); 12809 tree itype = TREE_TYPE (arg0); 12810 12811 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries. 12812 operand_equal_p guarantees no side-effects so we don't need 12813 to use omit_one_operand on Z. */ 12814 if (operand_equal_p (arg01, arg11, 0)) 12815 return fold_build2_loc (loc, code, type, arg00, 12816 fold_convert_loc (loc, TREE_TYPE (arg00), 12817 arg10)); 12818 if (operand_equal_p (arg01, arg10, 0)) 12819 return fold_build2_loc (loc, code, type, arg00, 12820 fold_convert_loc (loc, TREE_TYPE (arg00), 12821 arg11)); 12822 if (operand_equal_p (arg00, arg11, 0)) 12823 return fold_build2_loc (loc, code, type, arg01, 12824 fold_convert_loc (loc, TREE_TYPE (arg01), 12825 arg10)); 12826 if (operand_equal_p (arg00, arg10, 0)) 12827 return fold_build2_loc (loc, code, type, arg01, 12828 fold_convert_loc (loc, TREE_TYPE (arg01), 12829 arg11)); 12830 12831 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */ 12832 if (TREE_CODE (arg01) == INTEGER_CST 12833 && TREE_CODE (arg11) == INTEGER_CST) 12834 { 12835 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, 12836 fold_convert_loc (loc, itype, arg11)); 12837 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem); 12838 return fold_build2_loc (loc, code, type, tem, 12839 fold_convert_loc (loc, itype, arg10)); 12840 } 12841 } 12842 12843 /* Attempt to simplify equality/inequality comparisons of complex 12844 values. Only lower the comparison if the result is known or 12845 can be simplified to a single scalar comparison. 
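For instance, if the real parts are known to differ, Z1 == Z2 folds to false (keeping the imaginary parts for their side effects), and if the real parts are known to be equal, Z1 == Z2 reduces to a comparison of the imaginary parts alone; the same reasoning is then applied with the roles of the real and imaginary parts swapped.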
*/ 12846 if ((TREE_CODE (arg0) == COMPLEX_EXPR 12847 || TREE_CODE (arg0) == COMPLEX_CST) 12848 && (TREE_CODE (arg1) == COMPLEX_EXPR 12849 || TREE_CODE (arg1) == COMPLEX_CST)) 12850 { 12851 tree real0, imag0, real1, imag1; 12852 tree rcond, icond; 12853 12854 if (TREE_CODE (arg0) == COMPLEX_EXPR) 12855 { 12856 real0 = TREE_OPERAND (arg0, 0); 12857 imag0 = TREE_OPERAND (arg0, 1); 12858 } 12859 else 12860 { 12861 real0 = TREE_REALPART (arg0); 12862 imag0 = TREE_IMAGPART (arg0); 12863 } 12864 12865 if (TREE_CODE (arg1) == COMPLEX_EXPR) 12866 { 12867 real1 = TREE_OPERAND (arg1, 0); 12868 imag1 = TREE_OPERAND (arg1, 1); 12869 } 12870 else 12871 { 12872 real1 = TREE_REALPART (arg1); 12873 imag1 = TREE_IMAGPART (arg1); 12874 } 12875 12876 rcond = fold_binary_loc (loc, code, type, real0, real1); 12877 if (rcond && TREE_CODE (rcond) == INTEGER_CST) 12878 { 12879 if (integer_zerop (rcond)) 12880 { 12881 if (code == EQ_EXPR) 12882 return omit_two_operands_loc (loc, type, boolean_false_node, 12883 imag0, imag1); 12884 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1); 12885 } 12886 else 12887 { 12888 if (code == NE_EXPR) 12889 return omit_two_operands_loc (loc, type, boolean_true_node, 12890 imag0, imag1); 12891 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1); 12892 } 12893 } 12894 12895 icond = fold_binary_loc (loc, code, type, imag0, imag1); 12896 if (icond && TREE_CODE (icond) == INTEGER_CST) 12897 { 12898 if (integer_zerop (icond)) 12899 { 12900 if (code == EQ_EXPR) 12901 return omit_two_operands_loc (loc, type, boolean_false_node, 12902 real0, real1); 12903 return fold_build2_loc (loc, NE_EXPR, type, real0, real1); 12904 } 12905 else 12906 { 12907 if (code == NE_EXPR) 12908 return omit_two_operands_loc (loc, type, boolean_true_node, 12909 real0, real1); 12910 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1); 12911 } 12912 } 12913 } 12914 12915 return NULL_TREE; 12916 12917 case LT_EXPR: 12918 case GT_EXPR: 12919 case LE_EXPR: 12920 case GE_EXPR: 12921 tem = fold_comparison (loc, code, type, op0, op1); 12922 if (tem != NULL_TREE) 12923 return tem; 12924 12925 /* Transform comparisons of the form X +- C CMP X. */ 12926 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 12927 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 12928 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 12929 && !HONOR_SNANS (arg0)) 12930 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 12931 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))))) 12932 { 12933 tree arg01 = TREE_OPERAND (arg0, 1); 12934 enum tree_code code0 = TREE_CODE (arg0); 12935 int is_positive; 12936 12937 if (TREE_CODE (arg01) == REAL_CST) 12938 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1; 12939 else 12940 is_positive = tree_int_cst_sgn (arg01); 12941 12942 /* (X - c) > X becomes false. */ 12943 if (code == GT_EXPR 12944 && ((code0 == MINUS_EXPR && is_positive >= 0) 12945 || (code0 == PLUS_EXPR && is_positive <= 0))) 12946 { 12947 if (TREE_CODE (arg01) == INTEGER_CST 12948 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 12949 fold_overflow_warning (("assuming signed overflow does not " 12950 "occur when assuming that (X - c) > X " 12951 "is always false"), 12952 WARN_STRICT_OVERFLOW_ALL); 12953 return constant_boolean_node (0, type); 12954 } 12955 12956 /* Likewise (X + c) < X becomes false. 
*/ 12957 if (code == LT_EXPR 12958 && ((code0 == PLUS_EXPR && is_positive >= 0) 12959 || (code0 == MINUS_EXPR && is_positive <= 0))) 12960 { 12961 if (TREE_CODE (arg01) == INTEGER_CST 12962 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 12963 fold_overflow_warning (("assuming signed overflow does not " 12964 "occur when assuming that " 12965 "(X + c) < X is always false"), 12966 WARN_STRICT_OVERFLOW_ALL); 12967 return constant_boolean_node (0, type); 12968 } 12969 12970 /* Convert (X - c) <= X to true. */ 12971 if (!HONOR_NANS (arg1) 12972 && code == LE_EXPR 12973 && ((code0 == MINUS_EXPR && is_positive >= 0) 12974 || (code0 == PLUS_EXPR && is_positive <= 0))) 12975 { 12976 if (TREE_CODE (arg01) == INTEGER_CST 12977 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 12978 fold_overflow_warning (("assuming signed overflow does not " 12979 "occur when assuming that " 12980 "(X - c) <= X is always true"), 12981 WARN_STRICT_OVERFLOW_ALL); 12982 return constant_boolean_node (1, type); 12983 } 12984 12985 /* Convert (X + c) >= X to true. */ 12986 if (!HONOR_NANS (arg1) 12987 && code == GE_EXPR 12988 && ((code0 == PLUS_EXPR && is_positive >= 0) 12989 || (code0 == MINUS_EXPR && is_positive <= 0))) 12990 { 12991 if (TREE_CODE (arg01) == INTEGER_CST 12992 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 12993 fold_overflow_warning (("assuming signed overflow does not " 12994 "occur when assuming that " 12995 "(X + c) >= X is always true"), 12996 WARN_STRICT_OVERFLOW_ALL); 12997 return constant_boolean_node (1, type); 12998 } 12999 13000 if (TREE_CODE (arg01) == INTEGER_CST) 13001 { 13002 /* Convert X + c > X and X - c < X to true for integers. */ 13003 if (code == GT_EXPR 13004 && ((code0 == PLUS_EXPR && is_positive > 0) 13005 || (code0 == MINUS_EXPR && is_positive < 0))) 13006 { 13007 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13008 fold_overflow_warning (("assuming signed overflow does " 13009 "not occur when assuming that " 13010 "(X + c) > X is always true"), 13011 WARN_STRICT_OVERFLOW_ALL); 13012 return constant_boolean_node (1, type); 13013 } 13014 13015 if (code == LT_EXPR 13016 && ((code0 == MINUS_EXPR && is_positive > 0) 13017 || (code0 == PLUS_EXPR && is_positive < 0))) 13018 { 13019 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13020 fold_overflow_warning (("assuming signed overflow does " 13021 "not occur when assuming that " 13022 "(X - c) < X is always true"), 13023 WARN_STRICT_OVERFLOW_ALL); 13024 return constant_boolean_node (1, type); 13025 } 13026 13027 /* Convert X + c <= X and X - c >= X to false for integers. */ 13028 if (code == LE_EXPR 13029 && ((code0 == PLUS_EXPR && is_positive > 0) 13030 || (code0 == MINUS_EXPR && is_positive < 0))) 13031 { 13032 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13033 fold_overflow_warning (("assuming signed overflow does " 13034 "not occur when assuming that " 13035 "(X + c) <= X is always false"), 13036 WARN_STRICT_OVERFLOW_ALL); 13037 return constant_boolean_node (0, type); 13038 } 13039 13040 if (code == GE_EXPR 13041 && ((code0 == MINUS_EXPR && is_positive > 0) 13042 || (code0 == PLUS_EXPR && is_positive < 0))) 13043 { 13044 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13045 fold_overflow_warning (("assuming signed overflow does " 13046 "not occur when assuming that " 13047 "(X - c) >= X is always false"), 13048 WARN_STRICT_OVERFLOW_ALL); 13049 return constant_boolean_node (0, type); 13050 } 13051 } 13052 } 13053 13054 /* Comparisons with the highest or lowest possible integer of 13055 the specified precision will have known values. 
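For example, an unsigned 8-bit X satisfies X > 255 never and X <= 255 always, while X >= 255 and X < 255 collapse to equality and inequality tests against 255; analogous folds apply against the minimum value and against values one away from the extremes.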
*/ 13056 { 13057 tree arg1_type = TREE_TYPE (arg1); 13058 unsigned int prec = TYPE_PRECISION (arg1_type); 13059 13060 if (TREE_CODE (arg1) == INTEGER_CST 13061 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type))) 13062 { 13063 wide_int max = wi::max_value (arg1_type); 13064 wide_int signed_max = wi::max_value (prec, SIGNED); 13065 wide_int min = wi::min_value (arg1_type); 13066 13067 if (wi::eq_p (arg1, max)) 13068 switch (code) 13069 { 13070 case GT_EXPR: 13071 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 13072 13073 case GE_EXPR: 13074 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1); 13075 13076 case LE_EXPR: 13077 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 13078 13079 case LT_EXPR: 13080 return fold_build2_loc (loc, NE_EXPR, type, op0, op1); 13081 13082 /* The GE_EXPR and LT_EXPR cases above are not normally 13083 reached because of previous transformations. */ 13084 13085 default: 13086 break; 13087 } 13088 else if (wi::eq_p (arg1, max - 1)) 13089 switch (code) 13090 { 13091 case GT_EXPR: 13092 arg1 = const_binop (PLUS_EXPR, arg1, 13093 build_int_cst (TREE_TYPE (arg1), 1)); 13094 return fold_build2_loc (loc, EQ_EXPR, type, 13095 fold_convert_loc (loc, 13096 TREE_TYPE (arg1), arg0), 13097 arg1); 13098 case LE_EXPR: 13099 arg1 = const_binop (PLUS_EXPR, arg1, 13100 build_int_cst (TREE_TYPE (arg1), 1)); 13101 return fold_build2_loc (loc, NE_EXPR, type, 13102 fold_convert_loc (loc, TREE_TYPE (arg1), 13103 arg0), 13104 arg1); 13105 default: 13106 break; 13107 } 13108 else if (wi::eq_p (arg1, min)) 13109 switch (code) 13110 { 13111 case LT_EXPR: 13112 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 13113 13114 case LE_EXPR: 13115 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1); 13116 13117 case GE_EXPR: 13118 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 13119 13120 case GT_EXPR: 13121 return fold_build2_loc (loc, NE_EXPR, type, op0, op1); 13122 13123 default: 13124 break; 13125 } 13126 else if (wi::eq_p (arg1, min + 1)) 13127 switch (code) 13128 { 13129 case GE_EXPR: 13130 arg1 = const_binop (MINUS_EXPR, arg1, 13131 build_int_cst (TREE_TYPE (arg1), 1)); 13132 return fold_build2_loc (loc, NE_EXPR, type, 13133 fold_convert_loc (loc, 13134 TREE_TYPE (arg1), arg0), 13135 arg1); 13136 case LT_EXPR: 13137 arg1 = const_binop (MINUS_EXPR, arg1, 13138 build_int_cst (TREE_TYPE (arg1), 1)); 13139 return fold_build2_loc (loc, EQ_EXPR, type, 13140 fold_convert_loc (loc, TREE_TYPE (arg1), 13141 arg0), 13142 arg1); 13143 default: 13144 break; 13145 } 13146 13147 else if (wi::eq_p (arg1, signed_max) 13148 && TYPE_UNSIGNED (arg1_type) 13149 /* We will flip the signedness of the comparison operator 13150 associated with the mode of arg1, so the sign bit is 13151 specified by this mode. Check that arg1 is the signed 13152 max associated with this sign bit. */ 13153 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type)) 13154 /* signed_type does not work on pointer types. */ 13155 && INTEGRAL_TYPE_P (arg1_type)) 13156 { 13157 /* The following case also applies to X < signed_max+1 13158 and X >= signed_max+1 because previous transformations. */ 13159 if (code == LE_EXPR || code == GT_EXPR) 13160 { 13161 tree st = signed_type_for (arg1_type); 13162 return fold_build2_loc (loc, 13163 code == LE_EXPR ? 
GE_EXPR : LT_EXPR, 13164 type, fold_convert_loc (loc, st, arg0), 13165 build_int_cst (st, 0)); 13166 } 13167 } 13168 } 13169 } 13170 13171 /* If we are comparing an ABS_EXPR with a constant, we can 13172 convert all the cases into explicit comparisons, but they may 13173 well not be faster than doing the ABS and one comparison. 13174 But ABS (X) <= C is a range comparison, which becomes a subtraction 13175 and a comparison, and is probably faster. */ 13176 if (code == LE_EXPR 13177 && TREE_CODE (arg1) == INTEGER_CST 13178 && TREE_CODE (arg0) == ABS_EXPR 13179 && ! TREE_SIDE_EFFECTS (arg0) 13180 && (0 != (tem = negate_expr (arg1))) 13181 && TREE_CODE (tem) == INTEGER_CST 13182 && !TREE_OVERFLOW (tem)) 13183 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 13184 build2 (GE_EXPR, type, 13185 TREE_OPERAND (arg0, 0), tem), 13186 build2 (LE_EXPR, type, 13187 TREE_OPERAND (arg0, 0), arg1)); 13188 13189 /* Convert ABS_EXPR<x> >= 0 to true. */ 13190 strict_overflow_p = false; 13191 if (code == GE_EXPR 13192 && (integer_zerop (arg1) 13193 || (! HONOR_NANS (arg0) 13194 && real_zerop (arg1))) 13195 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 13196 { 13197 if (strict_overflow_p) 13198 fold_overflow_warning (("assuming signed overflow does not occur " 13199 "when simplifying comparison of " 13200 "absolute value and zero"), 13201 WARN_STRICT_OVERFLOW_CONDITIONAL); 13202 return omit_one_operand_loc (loc, type, 13203 constant_boolean_node (true, type), 13204 arg0); 13205 } 13206 13207 /* Convert ABS_EXPR<x> < 0 to false. */ 13208 strict_overflow_p = false; 13209 if (code == LT_EXPR 13210 && (integer_zerop (arg1) || real_zerop (arg1)) 13211 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 13212 { 13213 if (strict_overflow_p) 13214 fold_overflow_warning (("assuming signed overflow does not occur " 13215 "when simplifying comparison of " 13216 "absolute value and zero"), 13217 WARN_STRICT_OVERFLOW_CONDITIONAL); 13218 return omit_one_operand_loc (loc, type, 13219 constant_boolean_node (false, type), 13220 arg0); 13221 } 13222 13223 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0 13224 and similarly for >= into !=. */ 13225 if ((code == LT_EXPR || code == GE_EXPR) 13226 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 13227 && TREE_CODE (arg1) == LSHIFT_EXPR 13228 && integer_onep (TREE_OPERAND (arg1, 0))) 13229 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 13230 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, 13231 TREE_OPERAND (arg1, 1)), 13232 build_zero_cst (TREE_TYPE (arg0))); 13233 13234 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing, 13235 otherwise Y might be >= # of bits in X's type and thus e.g. 13236 (unsigned char) (1 << Y) for Y 15 might be 0. 13237 If the cast is widening, then 1 << Y should have unsigned type, 13238 otherwise if Y is number of bits in the signed shift type minus 1, 13239 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y 13240 31 might be 0xffffffff80000000. 
*/ 13241 if ((code == LT_EXPR || code == GE_EXPR) 13242 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 13243 && CONVERT_EXPR_P (arg1) 13244 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR 13245 && (element_precision (TREE_TYPE (arg1)) 13246 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))) 13247 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))) 13248 || (element_precision (TREE_TYPE (arg1)) 13249 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))) 13250 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0))) 13251 { 13252 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, 13253 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1)); 13254 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 13255 fold_convert_loc (loc, TREE_TYPE (arg0), tem), 13256 build_zero_cst (TREE_TYPE (arg0))); 13257 } 13258 13259 return NULL_TREE; 13260 13261 case UNORDERED_EXPR: 13262 case ORDERED_EXPR: 13263 case UNLT_EXPR: 13264 case UNLE_EXPR: 13265 case UNGT_EXPR: 13266 case UNGE_EXPR: 13267 case UNEQ_EXPR: 13268 case LTGT_EXPR: 13269 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST) 13270 { 13271 t1 = fold_relational_const (code, type, arg0, arg1); 13272 if (t1 != NULL_TREE) 13273 return t1; 13274 } 13275 13276 /* If the first operand is NaN, the result is constant. */ 13277 if (TREE_CODE (arg0) == REAL_CST 13278 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0)) 13279 && (code != LTGT_EXPR || ! flag_trapping_math)) 13280 { 13281 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 13282 ? integer_zero_node 13283 : integer_one_node; 13284 return omit_one_operand_loc (loc, type, t1, arg1); 13285 } 13286 13287 /* If the second operand is NaN, the result is constant. */ 13288 if (TREE_CODE (arg1) == REAL_CST 13289 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)) 13290 && (code != LTGT_EXPR || ! flag_trapping_math)) 13291 { 13292 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 13293 ? integer_zero_node 13294 : integer_one_node; 13295 return omit_one_operand_loc (loc, type, t1, arg0); 13296 } 13297 13298 /* Simplify unordered comparison of something with itself. */ 13299 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR) 13300 && operand_equal_p (arg0, arg1, 0)) 13301 return constant_boolean_node (1, type); 13302 13303 if (code == LTGT_EXPR 13304 && !flag_trapping_math 13305 && operand_equal_p (arg0, arg1, 0)) 13306 return constant_boolean_node (0, type); 13307 13308 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */ 13309 { 13310 tree targ0 = strip_float_extensions (arg0); 13311 tree targ1 = strip_float_extensions (arg1); 13312 tree newtype = TREE_TYPE (targ0); 13313 13314 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype)) 13315 newtype = TREE_TYPE (targ1); 13316 13317 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) 13318 return fold_build2_loc (loc, code, type, 13319 fold_convert_loc (loc, newtype, targ0), 13320 fold_convert_loc (loc, newtype, targ1)); 13321 } 13322 13323 return NULL_TREE; 13324 13325 case COMPOUND_EXPR: 13326 /* When pedantic, a compound expression can be neither an lvalue 13327 nor an integer constant expression. */ 13328 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1)) 13329 return NULL_TREE; 13330 /* Don't let (0, 0) be null pointer constant. */ 13331 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1) 13332 : fold_convert_loc (loc, type, arg1); 13333 return pedantic_non_lvalue_loc (loc, tem); 13334 13335 case ASSERT_EXPR: 13336 /* An ASSERT_EXPR should never be passed to fold_binary. 
*/ 13337 gcc_unreachable (); 13338 13339 default: 13340 return NULL_TREE; 13341 } /* switch (code) */ 13342} 13343 13344/* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is 13345 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees 13346 of GOTO_EXPR. */ 13347 13348static tree 13349contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) 13350{ 13351 switch (TREE_CODE (*tp)) 13352 { 13353 case LABEL_EXPR: 13354 return *tp; 13355 13356 case GOTO_EXPR: 13357 *walk_subtrees = 0; 13358 13359 /* ... fall through ... */ 13360 13361 default: 13362 return NULL_TREE; 13363 } 13364} 13365 13366/* Return whether the sub-tree ST contains a label which is accessible from 13367 outside the sub-tree. */ 13368 13369static bool 13370contains_label_p (tree st) 13371{ 13372 return 13373 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE); 13374} 13375 13376/* Fold a ternary expression of code CODE and type TYPE with operands 13377 OP0, OP1, and OP2. Return the folded expression if folding is 13378 successful. Otherwise, return NULL_TREE. */ 13379 13380tree 13381fold_ternary_loc (location_t loc, enum tree_code code, tree type, 13382 tree op0, tree op1, tree op2) 13383{ 13384 tree tem; 13385 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE; 13386 enum tree_code_class kind = TREE_CODE_CLASS (code); 13387 13388 gcc_assert (IS_EXPR_CODE_CLASS (kind) 13389 && TREE_CODE_LENGTH (code) == 3); 13390 13391 /* If this is a commutative operation, and OP0 is a constant, move it 13392 to OP1 to reduce the number of tests below. */ 13393 if (commutative_ternary_tree_code (code) 13394 && tree_swap_operands_p (op0, op1, true)) 13395 return fold_build3_loc (loc, code, type, op1, op0, op2); 13396 13397 tem = generic_simplify (loc, code, type, op0, op1, op2); 13398 if (tem) 13399 return tem; 13400 13401 /* Strip any conversions that don't change the mode. This is safe 13402 for every expression, except for a comparison expression because 13403 its signedness is derived from its operands. So, in the latter 13404 case, only strip conversions that don't change the signedness. 13405 13406 Note that this is done as an internal manipulation within the 13407 constant folder, in order to find the simplest representation of 13408 the arguments so that their form can be studied. In any cases, 13409 the appropriate type conversions should be put back in the tree 13410 that will get out of the constant folder. */ 13411 if (op0) 13412 { 13413 arg0 = op0; 13414 STRIP_NOPS (arg0); 13415 } 13416 13417 if (op1) 13418 { 13419 arg1 = op1; 13420 STRIP_NOPS (arg1); 13421 } 13422 13423 if (op2) 13424 { 13425 arg2 = op2; 13426 STRIP_NOPS (arg2); 13427 } 13428 13429 switch (code) 13430 { 13431 case COMPONENT_REF: 13432 if (TREE_CODE (arg0) == CONSTRUCTOR 13433 && ! type_contains_placeholder_p (TREE_TYPE (arg0))) 13434 { 13435 unsigned HOST_WIDE_INT idx; 13436 tree field, value; 13437 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value) 13438 if (field == arg1) 13439 return value; 13440 } 13441 return NULL_TREE; 13442 13443 case COND_EXPR: 13444 case VEC_COND_EXPR: 13445 /* Pedantic ANSI C says that a conditional expression is never an lvalue, 13446 so all simple results must be passed through pedantic_non_lvalue. */ 13447 if (TREE_CODE (arg0) == INTEGER_CST) 13448 { 13449 tree unused_op = integer_zerop (arg0) ? op1 : op2; 13450 tem = integer_zerop (arg0) ? 
op2 : op1; 13451 /* Only optimize constant conditions when the selected branch 13452 has the same type as the COND_EXPR. This avoids optimizing 13453 away "c ? x : throw", where the throw has a void type. 13454 Avoid throwing away that operand which contains label. */ 13455 if ((!TREE_SIDE_EFFECTS (unused_op) 13456 || !contains_label_p (unused_op)) 13457 && (! VOID_TYPE_P (TREE_TYPE (tem)) 13458 || VOID_TYPE_P (type))) 13459 return pedantic_non_lvalue_loc (loc, tem); 13460 return NULL_TREE; 13461 } 13462 else if (TREE_CODE (arg0) == VECTOR_CST) 13463 { 13464 if ((TREE_CODE (arg1) == VECTOR_CST 13465 || TREE_CODE (arg1) == CONSTRUCTOR) 13466 && (TREE_CODE (arg2) == VECTOR_CST 13467 || TREE_CODE (arg2) == CONSTRUCTOR)) 13468 { 13469 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 13470 unsigned char *sel = XALLOCAVEC (unsigned char, nelts); 13471 gcc_assert (nelts == VECTOR_CST_NELTS (arg0)); 13472 for (i = 0; i < nelts; i++) 13473 { 13474 tree val = VECTOR_CST_ELT (arg0, i); 13475 if (integer_all_onesp (val)) 13476 sel[i] = i; 13477 else if (integer_zerop (val)) 13478 sel[i] = nelts + i; 13479 else /* Currently unreachable. */ 13480 return NULL_TREE; 13481 } 13482 tree t = fold_vec_perm (type, arg1, arg2, sel); 13483 if (t != NULL_TREE) 13484 return t; 13485 } 13486 } 13487 13488 /* If we have A op B ? A : C, we may be able to convert this to a 13489 simpler expression, depending on the operation and the values 13490 of B and C. Signed zeros prevent all of these transformations, 13491 for reasons given above each one. 13492 13493 Also try swapping the arguments and inverting the conditional. */ 13494 if (COMPARISON_CLASS_P (arg0) 13495 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), 13496 arg1, TREE_OPERAND (arg0, 1)) 13497 && !HONOR_SIGNED_ZEROS (element_mode (arg1))) 13498 { 13499 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2); 13500 if (tem) 13501 return tem; 13502 } 13503 13504 if (COMPARISON_CLASS_P (arg0) 13505 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), 13506 op2, 13507 TREE_OPERAND (arg0, 1)) 13508 && !HONOR_SIGNED_ZEROS (element_mode (op2))) 13509 { 13510 location_t loc0 = expr_location_or (arg0, loc); 13511 tem = fold_invert_truthvalue (loc0, arg0); 13512 if (tem && COMPARISON_CLASS_P (tem)) 13513 { 13514 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1); 13515 if (tem) 13516 return tem; 13517 } 13518 } 13519 13520 /* If the second operand is simpler than the third, swap them 13521 since that produces better jump optimization results. */ 13522 if (truth_value_p (TREE_CODE (arg0)) 13523 && tree_swap_operands_p (op1, op2, false)) 13524 { 13525 location_t loc0 = expr_location_or (arg0, loc); 13526 /* See if this can be inverted. If it can't, possibly because 13527 it was a floating-point inequality comparison, don't do 13528 anything. */ 13529 tem = fold_invert_truthvalue (loc0, arg0); 13530 if (tem) 13531 return fold_build3_loc (loc, code, type, tem, op2, op1); 13532 } 13533 13534 /* Convert A ? 1 : 0 to simply A. */ 13535 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1) 13536 : (integer_onep (op1) 13537 && !VECTOR_TYPE_P (type))) 13538 && integer_zerop (op2) 13539 /* If we try to convert OP0 to our type, the 13540 call to fold will try to move the conversion inside 13541 a COND, which will recurse. In that case, the COND_EXPR 13542 is probably the best choice, so leave it alone. */ 13543 && type == TREE_TYPE (arg0)) 13544 return pedantic_non_lvalue_loc (loc, arg0); 13545 13546 /* Convert A ? 0 : 1 to !A. 
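(A must already be a truth value here; see the truth_value_p check below.)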
This prefers the use of NOT_EXPR 13547 over COND_EXPR in cases such as floating point comparisons. */ 13548 if (integer_zerop (op1) 13549 && (code == VEC_COND_EXPR ? integer_all_onesp (op2) 13550 : (integer_onep (op2) 13551 && !VECTOR_TYPE_P (type))) 13552 && truth_value_p (TREE_CODE (arg0))) 13553 return pedantic_non_lvalue_loc (loc, 13554 fold_convert_loc (loc, type, 13555 invert_truthvalue_loc (loc, 13556 arg0))); 13557 13558 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */ 13559 if (TREE_CODE (arg0) == LT_EXPR 13560 && integer_zerop (TREE_OPERAND (arg0, 1)) 13561 && integer_zerop (op2) 13562 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1))) 13563 { 13564 /* sign_bit_p looks through both zero and sign extensions, 13565 but for this optimization only sign extensions are 13566 usable. */ 13567 tree tem2 = TREE_OPERAND (arg0, 0); 13568 while (tem != tem2) 13569 { 13570 if (TREE_CODE (tem2) != NOP_EXPR 13571 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0)))) 13572 { 13573 tem = NULL_TREE; 13574 break; 13575 } 13576 tem2 = TREE_OPERAND (tem2, 0); 13577 } 13578 /* sign_bit_p only checks ARG1 bits within A's precision. 13579 If <sign bit of A> has wider type than A, bits outside 13580 of A's precision in <sign bit of A> need to be checked. 13581 If they are all 0, this optimization needs to be done 13582 in unsigned A's type, if they are all 1 in signed A's type, 13583 otherwise this can't be done. */ 13584 if (tem 13585 && TYPE_PRECISION (TREE_TYPE (tem)) 13586 < TYPE_PRECISION (TREE_TYPE (arg1)) 13587 && TYPE_PRECISION (TREE_TYPE (tem)) 13588 < TYPE_PRECISION (type)) 13589 { 13590 int inner_width, outer_width; 13591 tree tem_type; 13592 13593 inner_width = TYPE_PRECISION (TREE_TYPE (tem)); 13594 outer_width = TYPE_PRECISION (TREE_TYPE (arg1)); 13595 if (outer_width > TYPE_PRECISION (type)) 13596 outer_width = TYPE_PRECISION (type); 13597 13598 wide_int mask = wi::shifted_mask 13599 (inner_width, outer_width - inner_width, false, 13600 TYPE_PRECISION (TREE_TYPE (arg1))); 13601 13602 wide_int common = mask & arg1; 13603 if (common == mask) 13604 { 13605 tem_type = signed_type_for (TREE_TYPE (tem)); 13606 tem = fold_convert_loc (loc, tem_type, tem); 13607 } 13608 else if (common == 0) 13609 { 13610 tem_type = unsigned_type_for (TREE_TYPE (tem)); 13611 tem = fold_convert_loc (loc, tem_type, tem); 13612 } 13613 else 13614 tem = NULL; 13615 } 13616 13617 if (tem) 13618 return 13619 fold_convert_loc (loc, type, 13620 fold_build2_loc (loc, BIT_AND_EXPR, 13621 TREE_TYPE (tem), tem, 13622 fold_convert_loc (loc, 13623 TREE_TYPE (tem), 13624 arg1))); 13625 } 13626 13627 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was 13628 already handled above. */ 13629 if (TREE_CODE (arg0) == BIT_AND_EXPR 13630 && integer_onep (TREE_OPERAND (arg0, 1)) 13631 && integer_zerop (op2) 13632 && integer_pow2p (arg1)) 13633 { 13634 tree tem = TREE_OPERAND (arg0, 0); 13635 STRIP_NOPS (tem); 13636 if (TREE_CODE (tem) == RSHIFT_EXPR 13637 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)) 13638 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) == 13639 tree_to_uhwi (TREE_OPERAND (tem, 1))) 13640 return fold_build2_loc (loc, BIT_AND_EXPR, type, 13641 TREE_OPERAND (tem, 0), arg1); 13642 } 13643 13644 /* A & N ? N : 0 is simply A & N if N is a power of two. This 13645 is probably obsolete because the first operand should be a 13646 truth value (that's why we have the two cases above), but let's 13647 leave it in until we can confirm this for all front-ends. 
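The form actually matched below is (A & N) != 0 ? N : 0; since N is a single bit, A & N is either N or 0, which is exactly what the conditional selects.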
*/ 13648 if (integer_zerop (op2) 13649 && TREE_CODE (arg0) == NE_EXPR 13650 && integer_zerop (TREE_OPERAND (arg0, 1)) 13651 && integer_pow2p (arg1) 13652 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 13653 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 13654 arg1, OEP_ONLY_CONST)) 13655 return pedantic_non_lvalue_loc (loc, 13656 fold_convert_loc (loc, type, 13657 TREE_OPERAND (arg0, 0))); 13658 13659 /* Disable the transformations below for vectors, since 13660 fold_binary_op_with_conditional_arg may undo them immediately, 13661 yielding an infinite loop. */ 13662 if (code == VEC_COND_EXPR) 13663 return NULL_TREE; 13664 13665 /* Convert A ? B : 0 into A && B if A and B are truth values. */ 13666 if (integer_zerop (op2) 13667 && truth_value_p (TREE_CODE (arg0)) 13668 && truth_value_p (TREE_CODE (arg1)) 13669 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) 13670 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR 13671 : TRUTH_ANDIF_EXPR, 13672 type, fold_convert_loc (loc, type, arg0), arg1); 13673 13674 /* Convert A ? B : 1 into !A || B if A and B are truth values. */ 13675 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2) 13676 && truth_value_p (TREE_CODE (arg0)) 13677 && truth_value_p (TREE_CODE (arg1)) 13678 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) 13679 { 13680 location_t loc0 = expr_location_or (arg0, loc); 13681 /* Only perform transformation if ARG0 is easily inverted. */ 13682 tem = fold_invert_truthvalue (loc0, arg0); 13683 if (tem) 13684 return fold_build2_loc (loc, code == VEC_COND_EXPR 13685 ? BIT_IOR_EXPR 13686 : TRUTH_ORIF_EXPR, 13687 type, fold_convert_loc (loc, type, tem), 13688 arg1); 13689 } 13690 13691 /* Convert A ? 0 : B into !A && B if A and B are truth values. */ 13692 if (integer_zerop (arg1) 13693 && truth_value_p (TREE_CODE (arg0)) 13694 && truth_value_p (TREE_CODE (op2)) 13695 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) 13696 { 13697 location_t loc0 = expr_location_or (arg0, loc); 13698 /* Only perform transformation if ARG0 is easily inverted. */ 13699 tem = fold_invert_truthvalue (loc0, arg0); 13700 if (tem) 13701 return fold_build2_loc (loc, code == VEC_COND_EXPR 13702 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR, 13703 type, fold_convert_loc (loc, type, tem), 13704 op2); 13705 } 13706 13707 /* Convert A ? 1 : B into A || B if A and B are truth values. */ 13708 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1) 13709 && truth_value_p (TREE_CODE (arg0)) 13710 && truth_value_p (TREE_CODE (op2)) 13711 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) 13712 return fold_build2_loc (loc, code == VEC_COND_EXPR 13713 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR, 13714 type, fold_convert_loc (loc, type, arg0), op2); 13715 13716 return NULL_TREE; 13717 13718 case CALL_EXPR: 13719 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses 13720 of fold_ternary on them. 
*/ 13721 gcc_unreachable (); 13722 13723 case BIT_FIELD_REF: 13724 if ((TREE_CODE (arg0) == VECTOR_CST 13725 || (TREE_CODE (arg0) == CONSTRUCTOR 13726 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE)) 13727 && (type == TREE_TYPE (TREE_TYPE (arg0)) 13728 || (TREE_CODE (type) == VECTOR_TYPE 13729 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))) 13730 { 13731 tree eltype = TREE_TYPE (TREE_TYPE (arg0)); 13732 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype)); 13733 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1); 13734 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2); 13735 13736 if (n != 0 13737 && (idx % width) == 0 13738 && (n % width) == 0 13739 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))) 13740 { 13741 idx = idx / width; 13742 n = n / width; 13743 13744 if (TREE_CODE (arg0) == VECTOR_CST) 13745 { 13746 if (n == 1) 13747 return VECTOR_CST_ELT (arg0, idx); 13748 13749 tree *vals = XALLOCAVEC (tree, n); 13750 for (unsigned i = 0; i < n; ++i) 13751 vals[i] = VECTOR_CST_ELT (arg0, idx + i); 13752 return build_vector (type, vals); 13753 } 13754 13755 /* Constructor elements can be subvectors. */ 13756 unsigned HOST_WIDE_INT k = 1; 13757 if (CONSTRUCTOR_NELTS (arg0) != 0) 13758 { 13759 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value); 13760 if (TREE_CODE (cons_elem) == VECTOR_TYPE) 13761 k = TYPE_VECTOR_SUBPARTS (cons_elem); 13762 } 13763 13764 /* We keep an exact subset of the constructor elements. */ 13765 if ((idx % k) == 0 && (n % k) == 0) 13766 { 13767 if (CONSTRUCTOR_NELTS (arg0) == 0) 13768 return build_constructor (type, NULL); 13769 idx /= k; 13770 n /= k; 13771 if (n == 1) 13772 { 13773 if (idx < CONSTRUCTOR_NELTS (arg0)) 13774 return CONSTRUCTOR_ELT (arg0, idx)->value; 13775 return build_zero_cst (type); 13776 } 13777 13778 vec<constructor_elt, va_gc> *vals; 13779 vec_alloc (vals, n); 13780 for (unsigned i = 0; 13781 i < n && idx + i < CONSTRUCTOR_NELTS (arg0); 13782 ++i) 13783 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE, 13784 CONSTRUCTOR_ELT 13785 (arg0, idx + i)->value); 13786 return build_constructor (type, vals); 13787 } 13788 /* The bitfield references a single constructor element. */ 13789 else if (idx + n <= (idx / k + 1) * k) 13790 { 13791 if (CONSTRUCTOR_NELTS (arg0) <= idx / k) 13792 return build_zero_cst (type); 13793 else if (n == k) 13794 return CONSTRUCTOR_ELT (arg0, idx / k)->value; 13795 else 13796 return fold_build3_loc (loc, code, type, 13797 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1, 13798 build_int_cst (TREE_TYPE (op2), (idx % k) * width)); 13799 } 13800 } 13801 } 13802 13803 /* A bit-field-ref that referenced the full argument can be stripped. */ 13804 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 13805 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1) 13806 && integer_zerop (op2)) 13807 return fold_convert_loc (loc, type, arg0); 13808 13809 /* On constants we can use native encode/interpret to constant 13810 fold (nearly) all BIT_FIELD_REFs. */ 13811 if (CONSTANT_CLASS_P (arg0) 13812 && can_native_interpret_type_p (type) 13813 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0))) 13814 /* This limitation should not be necessary, we just need to 13815 round this up to mode size. */ 13816 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0 13817 /* Need bit-shifting of the buffer to relax the following. 
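In other words, the bit position of the reference must currently start on a byte boundary.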
*/ 13818 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0) 13819 { 13820 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2); 13821 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1); 13822 unsigned HOST_WIDE_INT clen; 13823 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0))); 13824 /* ??? We cannot tell native_encode_expr to start at 13825 some random byte only. So limit us to a reasonable amount 13826 of work. */ 13827 if (clen <= 4096) 13828 { 13829 unsigned char *b = XALLOCAVEC (unsigned char, clen); 13830 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen); 13831 if (len > 0 13832 && len * BITS_PER_UNIT >= bitpos + bitsize) 13833 { 13834 tree v = native_interpret_expr (type, 13835 b + bitpos / BITS_PER_UNIT, 13836 bitsize / BITS_PER_UNIT); 13837 if (v) 13838 return v; 13839 } 13840 } 13841 } 13842 13843 return NULL_TREE; 13844 13845 case FMA_EXPR: 13846 /* For integers we can decompose the FMA if possible. */ 13847 if (TREE_CODE (arg0) == INTEGER_CST 13848 && TREE_CODE (arg1) == INTEGER_CST) 13849 return fold_build2_loc (loc, PLUS_EXPR, type, 13850 const_binop (MULT_EXPR, arg0, arg1), arg2); 13851 if (integer_zerop (arg2)) 13852 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 13853 13854 return fold_fma (loc, type, arg0, arg1, arg2); 13855 13856 case VEC_PERM_EXPR: 13857 if (TREE_CODE (arg2) == VECTOR_CST) 13858 { 13859 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2; 13860 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts); 13861 unsigned char *sel2 = sel + nelts; 13862 bool need_mask_canon = false; 13863 bool need_mask_canon2 = false; 13864 bool all_in_vec0 = true; 13865 bool all_in_vec1 = true; 13866 bool maybe_identity = true; 13867 bool single_arg = (op0 == op1); 13868 bool changed = false; 13869 13870 mask2 = 2 * nelts - 1; 13871 mask = single_arg ? (nelts - 1) : mask2; 13872 gcc_assert (nelts == VECTOR_CST_NELTS (arg2)); 13873 for (i = 0; i < nelts; i++) 13874 { 13875 tree val = VECTOR_CST_ELT (arg2, i); 13876 if (TREE_CODE (val) != INTEGER_CST) 13877 return NULL_TREE; 13878 13879 /* Make sure that the perm value is in an acceptable 13880 range. */ 13881 wide_int t = val; 13882 need_mask_canon |= wi::gtu_p (t, mask); 13883 need_mask_canon2 |= wi::gtu_p (t, mask2); 13884 sel[i] = t.to_uhwi () & mask; 13885 sel2[i] = t.to_uhwi () & mask2; 13886 13887 if (sel[i] < nelts) 13888 all_in_vec1 = false; 13889 else 13890 all_in_vec0 = false; 13891 13892 if ((sel[i] & (nelts-1)) != i) 13893 maybe_identity = false; 13894 } 13895 13896 if (maybe_identity) 13897 { 13898 if (all_in_vec0) 13899 return op0; 13900 if (all_in_vec1) 13901 return op1; 13902 } 13903 13904 if (all_in_vec0) 13905 op1 = op0; 13906 else if (all_in_vec1) 13907 { 13908 op0 = op1; 13909 for (i = 0; i < nelts; i++) 13910 sel[i] -= nelts; 13911 need_mask_canon = true; 13912 } 13913 13914 if ((TREE_CODE (op0) == VECTOR_CST 13915 || TREE_CODE (op0) == CONSTRUCTOR) 13916 && (TREE_CODE (op1) == VECTOR_CST 13917 || TREE_CODE (op1) == CONSTRUCTOR)) 13918 { 13919 tree t = fold_vec_perm (type, op0, op1, sel); 13920 if (t != NULL_TREE) 13921 return t; 13922 } 13923 13924 if (op0 == op1 && !single_arg) 13925 changed = true; 13926 13927 /* Some targets are deficient and fail to expand a single 13928 argument permutation while still allowing an equivalent 13929 2-argument version. 
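In that case the selector is rewritten using sel2, whose elements index into the concatenation of both input vectors instead of being reduced modulo the length of a single vector, so that it can be expanded as a two-operand permutation.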
*/ 13930 if (need_mask_canon && arg2 == op2 13931 && !can_vec_perm_p (TYPE_MODE (type), false, sel) 13932 && can_vec_perm_p (TYPE_MODE (type), false, sel2)) 13933 { 13934 need_mask_canon = need_mask_canon2; 13935 sel = sel2; 13936 } 13937 13938 if (need_mask_canon && arg2 == op2) 13939 { 13940 tree *tsel = XALLOCAVEC (tree, nelts); 13941 tree eltype = TREE_TYPE (TREE_TYPE (arg2)); 13942 for (i = 0; i < nelts; i++) 13943 tsel[i] = build_int_cst (eltype, sel[i]); 13944 op2 = build_vector (TREE_TYPE (arg2), tsel); 13945 changed = true; 13946 } 13947 13948 if (changed) 13949 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2); 13950 } 13951 return NULL_TREE; 13952 13953 default: 13954 return NULL_TREE; 13955 } /* switch (code) */ 13956} 13957 13958/* Perform constant folding and related simplification of EXPR. 13959 The related simplifications include x*1 => x, x*0 => 0, etc., 13960 and application of the associative law. 13961 NOP_EXPR conversions may be removed freely (as long as we 13962 are careful not to change the type of the overall expression). 13963 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR, 13964 but we can constant-fold them if they have constant operands. */ 13965 13966#ifdef ENABLE_FOLD_CHECKING 13967# define fold(x) fold_1 (x) 13968static tree fold_1 (tree); 13969static 13970#endif 13971tree 13972fold (tree expr) 13973{ 13974 const tree t = expr; 13975 enum tree_code code = TREE_CODE (t); 13976 enum tree_code_class kind = TREE_CODE_CLASS (code); 13977 tree tem; 13978 location_t loc = EXPR_LOCATION (expr); 13979 13980 /* Return right away if a constant. */ 13981 if (kind == tcc_constant) 13982 return t; 13983 13984 /* CALL_EXPR-like objects with variable numbers of operands are 13985 treated specially. */ 13986 if (kind == tcc_vl_exp) 13987 { 13988 if (code == CALL_EXPR) 13989 { 13990 tem = fold_call_expr (loc, expr, false); 13991 return tem ? tem : expr; 13992 } 13993 return expr; 13994 } 13995 13996 if (IS_EXPR_CODE_CLASS (kind)) 13997 { 13998 tree type = TREE_TYPE (t); 13999 tree op0, op1, op2; 14000 14001 switch (TREE_CODE_LENGTH (code)) 14002 { 14003 case 1: 14004 op0 = TREE_OPERAND (t, 0); 14005 tem = fold_unary_loc (loc, code, type, op0); 14006 return tem ? tem : expr; 14007 case 2: 14008 op0 = TREE_OPERAND (t, 0); 14009 op1 = TREE_OPERAND (t, 1); 14010 tem = fold_binary_loc (loc, code, type, op0, op1); 14011 return tem ? tem : expr; 14012 case 3: 14013 op0 = TREE_OPERAND (t, 0); 14014 op1 = TREE_OPERAND (t, 1); 14015 op2 = TREE_OPERAND (t, 2); 14016 tem = fold_ternary_loc (loc, code, type, op0, op1, op2); 14017 return tem ? tem : expr; 14018 default: 14019 break; 14020 } 14021 } 14022 14023 switch (code) 14024 { 14025 case ARRAY_REF: 14026 { 14027 tree op0 = TREE_OPERAND (t, 0); 14028 tree op1 = TREE_OPERAND (t, 1); 14029 14030 if (TREE_CODE (op1) == INTEGER_CST 14031 && TREE_CODE (op0) == CONSTRUCTOR 14032 && ! type_contains_placeholder_p (TREE_TYPE (op0))) 14033 { 14034 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0); 14035 unsigned HOST_WIDE_INT end = vec_safe_length (elts); 14036 unsigned HOST_WIDE_INT begin = 0; 14037 14038 /* Find a matching index by means of a binary search. 
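This assumes the constructor elements are sorted by ascending index; a RANGE_EXPR index matches when OP1 lies between its low and high bounds.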
*/ 14039 while (begin != end) 14040 { 14041 unsigned HOST_WIDE_INT middle = (begin + end) / 2; 14042 tree index = (*elts)[middle].index; 14043 14044 if (TREE_CODE (index) == INTEGER_CST 14045 && tree_int_cst_lt (index, op1)) 14046 begin = middle + 1; 14047 else if (TREE_CODE (index) == INTEGER_CST 14048 && tree_int_cst_lt (op1, index)) 14049 end = middle; 14050 else if (TREE_CODE (index) == RANGE_EXPR 14051 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1)) 14052 begin = middle + 1; 14053 else if (TREE_CODE (index) == RANGE_EXPR 14054 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0))) 14055 end = middle; 14056 else 14057 return (*elts)[middle].value; 14058 } 14059 } 14060 14061 return t; 14062 } 14063 14064 /* Return a VECTOR_CST if possible. */ 14065 case CONSTRUCTOR: 14066 { 14067 tree type = TREE_TYPE (t); 14068 if (TREE_CODE (type) != VECTOR_TYPE) 14069 return t; 14070 14071 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type)); 14072 unsigned HOST_WIDE_INT idx, pos = 0; 14073 tree value; 14074 14075 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value) 14076 { 14077 if (!CONSTANT_CLASS_P (value)) 14078 return t; 14079 if (TREE_CODE (value) == VECTOR_CST) 14080 { 14081 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i) 14082 vec[pos++] = VECTOR_CST_ELT (value, i); 14083 } 14084 else 14085 vec[pos++] = value; 14086 } 14087 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos) 14088 vec[pos] = build_zero_cst (TREE_TYPE (type)); 14089 14090 return build_vector (type, vec); 14091 } 14092 14093 case CONST_DECL: 14094 return fold (DECL_INITIAL (t)); 14095 14096 default: 14097 return t; 14098 } /* switch (code) */ 14099} 14100 14101#ifdef ENABLE_FOLD_CHECKING 14102#undef fold 14103 14104static void fold_checksum_tree (const_tree, struct md5_ctx *, 14105 hash_table<pointer_hash<const tree_node> > *); 14106static void fold_check_failed (const_tree, const_tree); 14107void print_fold_checksum (const_tree); 14108 14109/* When --enable-checking=fold, compute a digest of expr before 14110 and after actual fold call to see if fold did not accidentally 14111 change original expr. 
*/ 14112 14113tree 14114fold (tree expr) 14115{ 14116 tree ret; 14117 struct md5_ctx ctx; 14118 unsigned char checksum_before[16], checksum_after[16]; 14119 hash_table<pointer_hash<const tree_node> > ht (32); 14120 14121 md5_init_ctx (&ctx); 14122 fold_checksum_tree (expr, &ctx, &ht); 14123 md5_finish_ctx (&ctx, checksum_before); 14124 ht.empty (); 14125 14126 ret = fold_1 (expr); 14127 14128 md5_init_ctx (&ctx); 14129 fold_checksum_tree (expr, &ctx, &ht); 14130 md5_finish_ctx (&ctx, checksum_after); 14131 14132 if (memcmp (checksum_before, checksum_after, 16)) 14133 fold_check_failed (expr, ret); 14134 14135 return ret; 14136} 14137 14138void 14139print_fold_checksum (const_tree expr) 14140{ 14141 struct md5_ctx ctx; 14142 unsigned char checksum[16], cnt; 14143 hash_table<pointer_hash<const tree_node> > ht (32); 14144 14145 md5_init_ctx (&ctx); 14146 fold_checksum_tree (expr, &ctx, &ht); 14147 md5_finish_ctx (&ctx, checksum); 14148 for (cnt = 0; cnt < 16; ++cnt) 14149 fprintf (stderr, "%02x", checksum[cnt]); 14150 putc ('\n', stderr); 14151} 14152 14153static void 14154fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED) 14155{ 14156 internal_error ("fold check: original tree changed by fold"); 14157} 14158 14159static void 14160fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, 14161 hash_table<pointer_hash <const tree_node> > *ht) 14162{ 14163 const tree_node **slot; 14164 enum tree_code code; 14165 union tree_node buf; 14166 int i, len; 14167 14168 recursive_label: 14169 if (expr == NULL) 14170 return; 14171 slot = ht->find_slot (expr, INSERT); 14172 if (*slot != NULL) 14173 return; 14174 *slot = expr; 14175 code = TREE_CODE (expr); 14176 if (TREE_CODE_CLASS (code) == tcc_declaration 14177 && HAS_DECL_ASSEMBLER_NAME_P (expr)) 14178 { 14179 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */ 14180 memcpy ((char *) &buf, expr, tree_size (expr)); 14181 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL); 14182 buf.decl_with_vis.symtab_node = NULL; 14183 expr = (tree) &buf; 14184 } 14185 else if (TREE_CODE_CLASS (code) == tcc_type 14186 && (TYPE_POINTER_TO (expr) 14187 || TYPE_REFERENCE_TO (expr) 14188 || TYPE_CACHED_VALUES_P (expr) 14189 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) 14190 || TYPE_NEXT_VARIANT (expr))) 14191 { 14192 /* Allow these fields to be modified. 
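They are side caches and chains (the pointer-to and reference-to lists, cached values, the placeholder flag and the variant chain) that fold may legitimately update, so clear them in a local copy before hashing the node.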
*/ 14193 tree tmp; 14194 memcpy ((char *) &buf, expr, tree_size (expr)); 14195 expr = tmp = (tree) &buf; 14196 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0; 14197 TYPE_POINTER_TO (tmp) = NULL; 14198 TYPE_REFERENCE_TO (tmp) = NULL; 14199 TYPE_NEXT_VARIANT (tmp) = NULL; 14200 if (TYPE_CACHED_VALUES_P (tmp)) 14201 { 14202 TYPE_CACHED_VALUES_P (tmp) = 0; 14203 TYPE_CACHED_VALUES (tmp) = NULL; 14204 } 14205 } 14206 md5_process_bytes (expr, tree_size (expr), ctx); 14207 if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) 14208 fold_checksum_tree (TREE_TYPE (expr), ctx, ht); 14209 if (TREE_CODE_CLASS (code) != tcc_type 14210 && TREE_CODE_CLASS (code) != tcc_declaration 14211 && code != TREE_LIST 14212 && code != SSA_NAME 14213 && CODE_CONTAINS_STRUCT (code, TS_COMMON)) 14214 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht); 14215 switch (TREE_CODE_CLASS (code)) 14216 { 14217 case tcc_constant: 14218 switch (code) 14219 { 14220 case STRING_CST: 14221 md5_process_bytes (TREE_STRING_POINTER (expr), 14222 TREE_STRING_LENGTH (expr), ctx); 14223 break; 14224 case COMPLEX_CST: 14225 fold_checksum_tree (TREE_REALPART (expr), ctx, ht); 14226 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht); 14227 break; 14228 case VECTOR_CST: 14229 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i) 14230 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht); 14231 break; 14232 default: 14233 break; 14234 } 14235 break; 14236 case tcc_exceptional: 14237 switch (code) 14238 { 14239 case TREE_LIST: 14240 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht); 14241 fold_checksum_tree (TREE_VALUE (expr), ctx, ht); 14242 expr = TREE_CHAIN (expr); 14243 goto recursive_label; 14244 break; 14245 case TREE_VEC: 14246 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i) 14247 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht); 14248 break; 14249 default: 14250 break; 14251 } 14252 break; 14253 case tcc_expression: 14254 case tcc_reference: 14255 case tcc_comparison: 14256 case tcc_unary: 14257 case tcc_binary: 14258 case tcc_statement: 14259 case tcc_vl_exp: 14260 len = TREE_OPERAND_LENGTH (expr); 14261 for (i = 0; i < len; ++i) 14262 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht); 14263 break; 14264 case tcc_declaration: 14265 fold_checksum_tree (DECL_NAME (expr), ctx, ht); 14266 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht); 14267 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON)) 14268 { 14269 fold_checksum_tree (DECL_SIZE (expr), ctx, ht); 14270 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht); 14271 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht); 14272 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht); 14273 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht); 14274 } 14275 14276 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON)) 14277 { 14278 if (TREE_CODE (expr) == FUNCTION_DECL) 14279 { 14280 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht); 14281 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht); 14282 } 14283 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht); 14284 } 14285 break; 14286 case tcc_type: 14287 if (TREE_CODE (expr) == ENUMERAL_TYPE) 14288 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht); 14289 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht); 14290 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht); 14291 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht); 14292 fold_checksum_tree (TYPE_NAME (expr), ctx, ht); 14293 if (INTEGRAL_TYPE_P (expr) 14294 || SCALAR_FLOAT_TYPE_P (expr)) 14295 { 14296 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht); 14297 fold_checksum_tree 
(TYPE_MAX_VALUE (expr), ctx, ht); 14298 } 14299 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht); 14300 if (TREE_CODE (expr) == RECORD_TYPE 14301 || TREE_CODE (expr) == UNION_TYPE 14302 || TREE_CODE (expr) == QUAL_UNION_TYPE) 14303 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht); 14304 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht); 14305 break; 14306 default: 14307 break; 14308 } 14309} 14310 14311/* Helper function for outputting the checksum of a tree T. When 14312 debugging with gdb, you can "define mynext" to be "next" followed 14313 by "call debug_fold_checksum (op0)", then just trace down till the 14314 outputs differ. */ 14315 14316DEBUG_FUNCTION void 14317debug_fold_checksum (const_tree t) 14318{ 14319 int i; 14320 unsigned char checksum[16]; 14321 struct md5_ctx ctx; 14322 hash_table<pointer_hash<const tree_node> > ht (32); 14323 14324 md5_init_ctx (&ctx); 14325 fold_checksum_tree (t, &ctx, &ht); 14326 md5_finish_ctx (&ctx, checksum); 14327 ht.empty (); 14328 14329 for (i = 0; i < 16; i++) 14330 fprintf (stderr, "%d ", checksum[i]); 14331 14332 fprintf (stderr, "\n"); 14333} 14334 14335#endif 14336 14337/* Fold a unary tree expression with code CODE of type TYPE with an 14338 operand OP0. LOC is the location of the resulting expression. 14339 Return a folded expression if successful. Otherwise, return a tree 14340 expression with code CODE of type TYPE with an operand OP0. */ 14341 14342tree 14343fold_build1_stat_loc (location_t loc, 14344 enum tree_code code, tree type, tree op0 MEM_STAT_DECL) 14345{ 14346 tree tem; 14347#ifdef ENABLE_FOLD_CHECKING 14348 unsigned char checksum_before[16], checksum_after[16]; 14349 struct md5_ctx ctx; 14350 hash_table<pointer_hash<const tree_node> > ht (32); 14351 14352 md5_init_ctx (&ctx); 14353 fold_checksum_tree (op0, &ctx, &ht); 14354 md5_finish_ctx (&ctx, checksum_before); 14355 ht.empty (); 14356#endif 14357 14358 tem = fold_unary_loc (loc, code, type, op0); 14359 if (!tem) 14360 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT); 14361 14362#ifdef ENABLE_FOLD_CHECKING 14363 md5_init_ctx (&ctx); 14364 fold_checksum_tree (op0, &ctx, &ht); 14365 md5_finish_ctx (&ctx, checksum_after); 14366 14367 if (memcmp (checksum_before, checksum_after, 16)) 14368 fold_check_failed (op0, tem); 14369#endif 14370 return tem; 14371} 14372 14373/* Fold a binary tree expression with code CODE of type TYPE with 14374 operands OP0 and OP1. LOC is the location of the resulting 14375 expression. Return a folded expression if successful. Otherwise, 14376 return a tree expression with code CODE of type TYPE with operands 14377 OP0 and OP1. 
*/ 14378 14379tree 14380fold_build2_stat_loc (location_t loc, 14381 enum tree_code code, tree type, tree op0, tree op1 14382 MEM_STAT_DECL) 14383{ 14384 tree tem; 14385#ifdef ENABLE_FOLD_CHECKING 14386 unsigned char checksum_before_op0[16], 14387 checksum_before_op1[16], 14388 checksum_after_op0[16], 14389 checksum_after_op1[16]; 14390 struct md5_ctx ctx; 14391 hash_table<pointer_hash<const tree_node> > ht (32); 14392 14393 md5_init_ctx (&ctx); 14394 fold_checksum_tree (op0, &ctx, &ht); 14395 md5_finish_ctx (&ctx, checksum_before_op0); 14396 ht.empty (); 14397 14398 md5_init_ctx (&ctx); 14399 fold_checksum_tree (op1, &ctx, &ht); 14400 md5_finish_ctx (&ctx, checksum_before_op1); 14401 ht.empty (); 14402#endif 14403 14404 tem = fold_binary_loc (loc, code, type, op0, op1); 14405 if (!tem) 14406 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT); 14407 14408#ifdef ENABLE_FOLD_CHECKING 14409 md5_init_ctx (&ctx); 14410 fold_checksum_tree (op0, &ctx, &ht); 14411 md5_finish_ctx (&ctx, checksum_after_op0); 14412 ht.empty (); 14413 14414 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 14415 fold_check_failed (op0, tem); 14416 14417 md5_init_ctx (&ctx); 14418 fold_checksum_tree (op1, &ctx, &ht); 14419 md5_finish_ctx (&ctx, checksum_after_op1); 14420 14421 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 14422 fold_check_failed (op1, tem); 14423#endif 14424 return tem; 14425} 14426 14427/* Fold a ternary tree expression with code CODE of type TYPE with 14428 operands OP0, OP1, and OP2. Return a folded expression if 14429 successful. Otherwise, return a tree expression with code CODE of 14430 type TYPE with operands OP0, OP1, and OP2. */ 14431 14432tree 14433fold_build3_stat_loc (location_t loc, enum tree_code code, tree type, 14434 tree op0, tree op1, tree op2 MEM_STAT_DECL) 14435{ 14436 tree tem; 14437#ifdef ENABLE_FOLD_CHECKING 14438 unsigned char checksum_before_op0[16], 14439 checksum_before_op1[16], 14440 checksum_before_op2[16], 14441 checksum_after_op0[16], 14442 checksum_after_op1[16], 14443 checksum_after_op2[16]; 14444 struct md5_ctx ctx; 14445 hash_table<pointer_hash<const tree_node> > ht (32); 14446 14447 md5_init_ctx (&ctx); 14448 fold_checksum_tree (op0, &ctx, &ht); 14449 md5_finish_ctx (&ctx, checksum_before_op0); 14450 ht.empty (); 14451 14452 md5_init_ctx (&ctx); 14453 fold_checksum_tree (op1, &ctx, &ht); 14454 md5_finish_ctx (&ctx, checksum_before_op1); 14455 ht.empty (); 14456 14457 md5_init_ctx (&ctx); 14458 fold_checksum_tree (op2, &ctx, &ht); 14459 md5_finish_ctx (&ctx, checksum_before_op2); 14460 ht.empty (); 14461#endif 14462 14463 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); 14464 tem = fold_ternary_loc (loc, code, type, op0, op1, op2); 14465 if (!tem) 14466 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT); 14467 14468#ifdef ENABLE_FOLD_CHECKING 14469 md5_init_ctx (&ctx); 14470 fold_checksum_tree (op0, &ctx, &ht); 14471 md5_finish_ctx (&ctx, checksum_after_op0); 14472 ht.empty (); 14473 14474 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 14475 fold_check_failed (op0, tem); 14476 14477 md5_init_ctx (&ctx); 14478 fold_checksum_tree (op1, &ctx, &ht); 14479 md5_finish_ctx (&ctx, checksum_after_op1); 14480 ht.empty (); 14481 14482 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 14483 fold_check_failed (op1, tem); 14484 14485 md5_init_ctx (&ctx); 14486 fold_checksum_tree (op2, &ctx, &ht); 14487 md5_finish_ctx (&ctx, checksum_after_op2); 14488 14489 if (memcmp (checksum_before_op2, checksum_after_op2, 
16)) 14490 fold_check_failed (op2, tem); 14491#endif 14492 return tem; 14493} 14494 14495/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS 14496 arguments in ARGARRAY, and a null static chain. 14497 Return a folded expression if successful. Otherwise, return a CALL_EXPR 14498 of type TYPE from the given operands as constructed by build_call_array. */ 14499 14500tree 14501fold_build_call_array_loc (location_t loc, tree type, tree fn, 14502 int nargs, tree *argarray) 14503{ 14504 tree tem; 14505#ifdef ENABLE_FOLD_CHECKING 14506 unsigned char checksum_before_fn[16], 14507 checksum_before_arglist[16], 14508 checksum_after_fn[16], 14509 checksum_after_arglist[16]; 14510 struct md5_ctx ctx; 14511 hash_table<pointer_hash<const tree_node> > ht (32); 14512 int i; 14513 14514 md5_init_ctx (&ctx); 14515 fold_checksum_tree (fn, &ctx, &ht); 14516 md5_finish_ctx (&ctx, checksum_before_fn); 14517 ht.empty (); 14518 14519 md5_init_ctx (&ctx); 14520 for (i = 0; i < nargs; i++) 14521 fold_checksum_tree (argarray[i], &ctx, &ht); 14522 md5_finish_ctx (&ctx, checksum_before_arglist); 14523 ht.empty (); 14524#endif 14525 14526 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray); 14527 if (!tem) 14528 tem = build_call_array_loc (loc, type, fn, nargs, argarray); 14529 14530#ifdef ENABLE_FOLD_CHECKING 14531 md5_init_ctx (&ctx); 14532 fold_checksum_tree (fn, &ctx, &ht); 14533 md5_finish_ctx (&ctx, checksum_after_fn); 14534 ht.empty (); 14535 14536 if (memcmp (checksum_before_fn, checksum_after_fn, 16)) 14537 fold_check_failed (fn, tem); 14538 14539 md5_init_ctx (&ctx); 14540 for (i = 0; i < nargs; i++) 14541 fold_checksum_tree (argarray[i], &ctx, &ht); 14542 md5_finish_ctx (&ctx, checksum_after_arglist); 14543 14544 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16)) 14545 fold_check_failed (NULL_TREE, tem); 14546#endif 14547 return tem; 14548} 14549 14550/* Perform constant folding and related simplification of initializer 14551 expression EXPR. These behave identically to "fold_buildN" but ignore 14552 potential run-time traps and exceptions that fold must preserve. 
*/ 14553 14554 #define START_FOLD_INIT \ 14555 int saved_signaling_nans = flag_signaling_nans;\ 14556 int saved_trapping_math = flag_trapping_math;\ 14557 int saved_rounding_math = flag_rounding_math;\ 14558 int saved_trapv = flag_trapv;\ 14559 int saved_folding_initializer = folding_initializer;\ 14560 flag_signaling_nans = 0;\ 14561 flag_trapping_math = 0;\ 14562 flag_rounding_math = 0;\ 14563 flag_trapv = 0;\ 14564 folding_initializer = 1; 14565 14566 #define END_FOLD_INIT \ 14567 flag_signaling_nans = saved_signaling_nans;\ 14568 flag_trapping_math = saved_trapping_math;\ 14569 flag_rounding_math = saved_rounding_math;\ 14570 flag_trapv = saved_trapv;\ 14571 folding_initializer = saved_folding_initializer; 14572 14573 tree 14574 fold_build1_initializer_loc (location_t loc, enum tree_code code, 14575 tree type, tree op) 14576 { 14577 tree result; 14578 START_FOLD_INIT; 14579 14580 result = fold_build1_loc (loc, code, type, op); 14581 14582 END_FOLD_INIT; 14583 return result; 14584 } 14585 14586 tree 14587 fold_build2_initializer_loc (location_t loc, enum tree_code code, 14588 tree type, tree op0, tree op1) 14589 { 14590 tree result; 14591 START_FOLD_INIT; 14592 14593 result = fold_build2_loc (loc, code, type, op0, op1); 14594 14595 END_FOLD_INIT; 14596 return result; 14597 } 14598 14599 tree 14600 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn, 14601 int nargs, tree *argarray) 14602 { 14603 tree result; 14604 START_FOLD_INIT; 14605 14606 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray); 14607 14608 END_FOLD_INIT; 14609 return result; 14610 } 14611 14612 #undef START_FOLD_INIT 14613 #undef END_FOLD_INIT 14614 14615 /* Determine if the first argument is a multiple of the second argument. Return 0 if 14616 it is not, or if we cannot easily determine it to be. 14617 14618 An example of the sort of thing we care about (at this point; this routine 14619 could surely be made more general, and expanded to do what the *_DIV_EXPR's 14620 fold cases do now) is discovering that 14621 14622 SAVE_EXPR (I) * SAVE_EXPR (J * 8) 14623 14624 is a multiple of 14625 14626 SAVE_EXPR (J * 8) 14627 14628 when we know that the two SAVE_EXPR (J * 8) nodes are the same node. 14629 14630 This code also handles discovering that 14631 14632 SAVE_EXPR (I) * SAVE_EXPR (J * 8) 14633 14634 is a multiple of 8 so we don't have to worry about dealing with a 14635 possible remainder. 14636 14637 Note that we *look* inside a SAVE_EXPR only to determine how it was 14638 calculated; it is not safe for fold to do much of anything else with the 14639 internals of a SAVE_EXPR, since it cannot know when it will be evaluated 14640 at run time. For example, the latter example above *cannot* be implemented 14641 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at 14642 evaluation time of the original SAVE_EXPR is not necessarily the same at 14643 the time the new expression is evaluated. The only optimization of this 14644 sort that would be valid is changing 14645 14646 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8) 14647 14648 divided by 8 to 14649 14650 SAVE_EXPR (I) * SAVE_EXPR (J) 14651 14652 (where the same SAVE_EXPR (J) is used in the original and the 14653 transformed version).
*/ 14654 14655int 14656multiple_of_p (tree type, const_tree top, const_tree bottom) 14657{ 14658 if (operand_equal_p (top, bottom, 0)) 14659 return 1; 14660 14661 if (TREE_CODE (type) != INTEGER_TYPE) 14662 return 0; 14663 14664 switch (TREE_CODE (top)) 14665 { 14666 case BIT_AND_EXPR: 14667 /* Bitwise and provides a power of two multiple. If the mask is 14668 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */ 14669 if (!integer_pow2p (bottom)) 14670 return 0; 14671 /* FALLTHRU */ 14672 14673 case MULT_EXPR: 14674 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom) 14675 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom)); 14676 14677 case PLUS_EXPR: 14678 case MINUS_EXPR: 14679 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom) 14680 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom)); 14681 14682 case LSHIFT_EXPR: 14683 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST) 14684 { 14685 tree op1, t1; 14686 14687 op1 = TREE_OPERAND (top, 1); 14688 /* const_binop may not detect overflow correctly, 14689 so check for it explicitly here. */ 14690 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1) 14691 && 0 != (t1 = fold_convert (type, 14692 const_binop (LSHIFT_EXPR, 14693 size_one_node, 14694 op1))) 14695 && !TREE_OVERFLOW (t1)) 14696 return multiple_of_p (type, t1, bottom); 14697 } 14698 return 0; 14699 14700 case NOP_EXPR: 14701 /* Can't handle conversions from non-integral or wider integral type. */ 14702 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE) 14703 || (TYPE_PRECISION (type) 14704 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0))))) 14705 return 0; 14706 14707 /* .. fall through ... */ 14708 14709 case SAVE_EXPR: 14710 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom); 14711 14712 case COND_EXPR: 14713 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom) 14714 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom)); 14715 14716 case INTEGER_CST: 14717 if (TREE_CODE (bottom) != INTEGER_CST 14718 || integer_zerop (bottom) 14719 || (TYPE_UNSIGNED (type) 14720 && (tree_int_cst_sgn (top) < 0 14721 || tree_int_cst_sgn (bottom) < 0))) 14722 return 0; 14723 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom), 14724 SIGNED); 14725 14726 default: 14727 return 0; 14728 } 14729} 14730 14731/* Return true if CODE or TYPE is known to be non-negative. */ 14732 14733static bool 14734tree_simple_nonnegative_warnv_p (enum tree_code code, tree type) 14735{ 14736 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type)) 14737 && truth_value_p (code)) 14738 /* Truth values evaluate to 0 or 1, which is nonnegative unless we 14739 have a signed:1 type (where the value is -1 and 0). */ 14740 return true; 14741 return false; 14742} 14743 14744/* Return true if (CODE OP0) is known to be non-negative. If the return 14745 value is based on the assumption that signed overflow is undefined, 14746 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14747 *STRICT_OVERFLOW_P. */ 14748 14749bool 14750tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0, 14751 bool *strict_overflow_p) 14752{ 14753 if (TYPE_UNSIGNED (type)) 14754 return true; 14755 14756 switch (code) 14757 { 14758 case ABS_EXPR: 14759 /* We can't return 1 if flag_wrapv is set because 14760 ABS_EXPR<INT_MIN> = INT_MIN. 
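For example, with wrapping 32-bit arithmetic ABS_EXPR <-2147483648> wraps back to -2147483648, which is still negative.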
*/ 14761 if (!INTEGRAL_TYPE_P (type)) 14762 return true; 14763 if (TYPE_OVERFLOW_UNDEFINED (type)) 14764 { 14765 *strict_overflow_p = true; 14766 return true; 14767 } 14768 break; 14769 14770 case NON_LVALUE_EXPR: 14771 case FLOAT_EXPR: 14772 case FIX_TRUNC_EXPR: 14773 return tree_expr_nonnegative_warnv_p (op0, 14774 strict_overflow_p); 14775 14776 CASE_CONVERT: 14777 { 14778 tree inner_type = TREE_TYPE (op0); 14779 tree outer_type = type; 14780 14781 if (TREE_CODE (outer_type) == REAL_TYPE) 14782 { 14783 if (TREE_CODE (inner_type) == REAL_TYPE) 14784 return tree_expr_nonnegative_warnv_p (op0, 14785 strict_overflow_p); 14786 if (INTEGRAL_TYPE_P (inner_type)) 14787 { 14788 if (TYPE_UNSIGNED (inner_type)) 14789 return true; 14790 return tree_expr_nonnegative_warnv_p (op0, 14791 strict_overflow_p); 14792 } 14793 } 14794 else if (INTEGRAL_TYPE_P (outer_type)) 14795 { 14796 if (TREE_CODE (inner_type) == REAL_TYPE) 14797 return tree_expr_nonnegative_warnv_p (op0, 14798 strict_overflow_p); 14799 if (INTEGRAL_TYPE_P (inner_type)) 14800 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type) 14801 && TYPE_UNSIGNED (inner_type); 14802 } 14803 } 14804 break; 14805 14806 default: 14807 return tree_simple_nonnegative_warnv_p (code, type); 14808 } 14809 14810 /* We don't know sign of `t', so be conservative and return false. */ 14811 return false; 14812} 14813 14814/* Return true if (CODE OP0 OP1) is known to be non-negative. If the return 14815 value is based on the assumption that signed overflow is undefined, 14816 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14817 *STRICT_OVERFLOW_P. */ 14818 14819bool 14820tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0, 14821 tree op1, bool *strict_overflow_p) 14822{ 14823 if (TYPE_UNSIGNED (type)) 14824 return true; 14825 14826 switch (code) 14827 { 14828 case POINTER_PLUS_EXPR: 14829 case PLUS_EXPR: 14830 if (FLOAT_TYPE_P (type)) 14831 return (tree_expr_nonnegative_warnv_p (op0, 14832 strict_overflow_p) 14833 && tree_expr_nonnegative_warnv_p (op1, 14834 strict_overflow_p)); 14835 14836 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are 14837 both unsigned and at least 2 bits shorter than the result. */ 14838 if (TREE_CODE (type) == INTEGER_TYPE 14839 && TREE_CODE (op0) == NOP_EXPR 14840 && TREE_CODE (op1) == NOP_EXPR) 14841 { 14842 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0)); 14843 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0)); 14844 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1) 14845 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2)) 14846 { 14847 unsigned int prec = MAX (TYPE_PRECISION (inner1), 14848 TYPE_PRECISION (inner2)) + 1; 14849 return prec < TYPE_PRECISION (type); 14850 } 14851 } 14852 break; 14853 14854 case MULT_EXPR: 14855 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 14856 { 14857 /* x * x is always non-negative for floating point x 14858 or without overflow. */ 14859 if (operand_equal_p (op0, op1, 0) 14860 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p) 14861 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p))) 14862 { 14863 if (ANY_INTEGRAL_TYPE_P (type) 14864 && TYPE_OVERFLOW_UNDEFINED (type)) 14865 *strict_overflow_p = true; 14866 return true; 14867 } 14868 } 14869 14870 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are 14871 both unsigned and their total bits is shorter than the result. 
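For example, the product of two unsigned chars is at most 255 * 255 = 65025, which fits in the low 16 bits of a 32-bit int and is therefore non-negative.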
*/ 14872 if (TREE_CODE (type) == INTEGER_TYPE 14873 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST) 14874 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST)) 14875 { 14876 tree inner0 = (TREE_CODE (op0) == NOP_EXPR) 14877 ? TREE_TYPE (TREE_OPERAND (op0, 0)) 14878 : TREE_TYPE (op0); 14879 tree inner1 = (TREE_CODE (op1) == NOP_EXPR) 14880 ? TREE_TYPE (TREE_OPERAND (op1, 0)) 14881 : TREE_TYPE (op1); 14882 14883 bool unsigned0 = TYPE_UNSIGNED (inner0); 14884 bool unsigned1 = TYPE_UNSIGNED (inner1); 14885 14886 if (TREE_CODE (op0) == INTEGER_CST) 14887 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0; 14888 14889 if (TREE_CODE (op1) == INTEGER_CST) 14890 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0; 14891 14892 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0 14893 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1) 14894 { 14895 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST) 14896 ? tree_int_cst_min_precision (op0, UNSIGNED) 14897 : TYPE_PRECISION (inner0); 14898 14899 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST) 14900 ? tree_int_cst_min_precision (op1, UNSIGNED) 14901 : TYPE_PRECISION (inner1); 14902 14903 return precision0 + precision1 < TYPE_PRECISION (type); 14904 } 14905 } 14906 return false; 14907 14908 case BIT_AND_EXPR: 14909 case MAX_EXPR: 14910 return (tree_expr_nonnegative_warnv_p (op0, 14911 strict_overflow_p) 14912 || tree_expr_nonnegative_warnv_p (op1, 14913 strict_overflow_p)); 14914 14915 case BIT_IOR_EXPR: 14916 case BIT_XOR_EXPR: 14917 case MIN_EXPR: 14918 case RDIV_EXPR: 14919 case TRUNC_DIV_EXPR: 14920 case CEIL_DIV_EXPR: 14921 case FLOOR_DIV_EXPR: 14922 case ROUND_DIV_EXPR: 14923 return (tree_expr_nonnegative_warnv_p (op0, 14924 strict_overflow_p) 14925 && tree_expr_nonnegative_warnv_p (op1, 14926 strict_overflow_p)); 14927 14928 case TRUNC_MOD_EXPR: 14929 case CEIL_MOD_EXPR: 14930 case FLOOR_MOD_EXPR: 14931 case ROUND_MOD_EXPR: 14932 return tree_expr_nonnegative_warnv_p (op0, 14933 strict_overflow_p); 14934 default: 14935 return tree_simple_nonnegative_warnv_p (code, type); 14936 } 14937 14938 /* We don't know sign of `t', so be conservative and return false. */ 14939 return false; 14940} 14941 14942/* Return true if T is known to be non-negative. If the return 14943 value is based on the assumption that signed overflow is undefined, 14944 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14945 *STRICT_OVERFLOW_P. */ 14946 14947bool 14948tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 14949{ 14950 if (TYPE_UNSIGNED (TREE_TYPE (t))) 14951 return true; 14952 14953 switch (TREE_CODE (t)) 14954 { 14955 case INTEGER_CST: 14956 return tree_int_cst_sgn (t) >= 0; 14957 14958 case REAL_CST: 14959 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); 14960 14961 case FIXED_CST: 14962 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t)); 14963 14964 case COND_EXPR: 14965 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 14966 strict_overflow_p) 14967 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2), 14968 strict_overflow_p)); 14969 default: 14970 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), 14971 TREE_TYPE (t)); 14972 } 14973 /* We don't know sign of `t', so be conservative and return false. */ 14974 return false; 14975} 14976 14977/* Return true if T is known to be non-negative. If the return 14978 value is based on the assumption that signed overflow is undefined, 14979 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14980 *STRICT_OVERFLOW_P. 
*/ 14981 14982bool 14983tree_call_nonnegative_warnv_p (tree type, tree fndecl, 14984 tree arg0, tree arg1, bool *strict_overflow_p) 14985{ 14986 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 14987 switch (DECL_FUNCTION_CODE (fndecl)) 14988 { 14989 CASE_FLT_FN (BUILT_IN_ACOS): 14990 CASE_FLT_FN (BUILT_IN_ACOSH): 14991 CASE_FLT_FN (BUILT_IN_CABS): 14992 CASE_FLT_FN (BUILT_IN_COSH): 14993 CASE_FLT_FN (BUILT_IN_ERFC): 14994 CASE_FLT_FN (BUILT_IN_EXP): 14995 CASE_FLT_FN (BUILT_IN_EXP10): 14996 CASE_FLT_FN (BUILT_IN_EXP2): 14997 CASE_FLT_FN (BUILT_IN_FABS): 14998 CASE_FLT_FN (BUILT_IN_FDIM): 14999 CASE_FLT_FN (BUILT_IN_HYPOT): 15000 CASE_FLT_FN (BUILT_IN_POW10): 15001 CASE_INT_FN (BUILT_IN_FFS): 15002 CASE_INT_FN (BUILT_IN_PARITY): 15003 CASE_INT_FN (BUILT_IN_POPCOUNT): 15004 CASE_INT_FN (BUILT_IN_CLZ): 15005 CASE_INT_FN (BUILT_IN_CLRSB): 15006 case BUILT_IN_BSWAP32: 15007 case BUILT_IN_BSWAP64: 15008 /* Always true. */ 15009 return true; 15010 15011 CASE_FLT_FN (BUILT_IN_SQRT): 15012 /* sqrt(-0.0) is -0.0. */ 15013 if (!HONOR_SIGNED_ZEROS (element_mode (type))) 15014 return true; 15015 return tree_expr_nonnegative_warnv_p (arg0, 15016 strict_overflow_p); 15017 15018 CASE_FLT_FN (BUILT_IN_ASINH): 15019 CASE_FLT_FN (BUILT_IN_ATAN): 15020 CASE_FLT_FN (BUILT_IN_ATANH): 15021 CASE_FLT_FN (BUILT_IN_CBRT): 15022 CASE_FLT_FN (BUILT_IN_CEIL): 15023 CASE_FLT_FN (BUILT_IN_ERF): 15024 CASE_FLT_FN (BUILT_IN_EXPM1): 15025 CASE_FLT_FN (BUILT_IN_FLOOR): 15026 CASE_FLT_FN (BUILT_IN_FMOD): 15027 CASE_FLT_FN (BUILT_IN_FREXP): 15028 CASE_FLT_FN (BUILT_IN_ICEIL): 15029 CASE_FLT_FN (BUILT_IN_IFLOOR): 15030 CASE_FLT_FN (BUILT_IN_IRINT): 15031 CASE_FLT_FN (BUILT_IN_IROUND): 15032 CASE_FLT_FN (BUILT_IN_LCEIL): 15033 CASE_FLT_FN (BUILT_IN_LDEXP): 15034 CASE_FLT_FN (BUILT_IN_LFLOOR): 15035 CASE_FLT_FN (BUILT_IN_LLCEIL): 15036 CASE_FLT_FN (BUILT_IN_LLFLOOR): 15037 CASE_FLT_FN (BUILT_IN_LLRINT): 15038 CASE_FLT_FN (BUILT_IN_LLROUND): 15039 CASE_FLT_FN (BUILT_IN_LRINT): 15040 CASE_FLT_FN (BUILT_IN_LROUND): 15041 CASE_FLT_FN (BUILT_IN_MODF): 15042 CASE_FLT_FN (BUILT_IN_NEARBYINT): 15043 CASE_FLT_FN (BUILT_IN_RINT): 15044 CASE_FLT_FN (BUILT_IN_ROUND): 15045 CASE_FLT_FN (BUILT_IN_SCALB): 15046 CASE_FLT_FN (BUILT_IN_SCALBLN): 15047 CASE_FLT_FN (BUILT_IN_SCALBN): 15048 CASE_FLT_FN (BUILT_IN_SIGNBIT): 15049 CASE_FLT_FN (BUILT_IN_SIGNIFICAND): 15050 CASE_FLT_FN (BUILT_IN_SINH): 15051 CASE_FLT_FN (BUILT_IN_TANH): 15052 CASE_FLT_FN (BUILT_IN_TRUNC): 15053 /* True if the 1st argument is nonnegative. */ 15054 return tree_expr_nonnegative_warnv_p (arg0, 15055 strict_overflow_p); 15056 15057 CASE_FLT_FN (BUILT_IN_FMAX): 15058 /* True if the 1st OR 2nd arguments are nonnegative. */ 15059 return (tree_expr_nonnegative_warnv_p (arg0, 15060 strict_overflow_p) 15061 || (tree_expr_nonnegative_warnv_p (arg1, 15062 strict_overflow_p))); 15063 15064 CASE_FLT_FN (BUILT_IN_FMIN): 15065 /* True if the 1st AND 2nd arguments are nonnegative. */ 15066 return (tree_expr_nonnegative_warnv_p (arg0, 15067 strict_overflow_p) 15068 && (tree_expr_nonnegative_warnv_p (arg1, 15069 strict_overflow_p))); 15070 15071 CASE_FLT_FN (BUILT_IN_COPYSIGN): 15072 /* True if the 2nd argument is nonnegative. */ 15073 return tree_expr_nonnegative_warnv_p (arg1, 15074 strict_overflow_p); 15075 15076 CASE_FLT_FN (BUILT_IN_POWI): 15077 /* True if the 1st argument is nonnegative or the second 15078 argument is an even integer. 
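(x ** (2 * k) is (x ** k) ** 2, a square, and hence never negative for real x.)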
*/ 15079 if (TREE_CODE (arg1) == INTEGER_CST 15080 && (TREE_INT_CST_LOW (arg1) & 1) == 0) 15081 return true; 15082 return tree_expr_nonnegative_warnv_p (arg0, 15083 strict_overflow_p); 15084 15085 CASE_FLT_FN (BUILT_IN_POW): 15086 /* True if the 1st argument is nonnegative or the second 15087 argument is an even integer valued real. */ 15088 if (TREE_CODE (arg1) == REAL_CST) 15089 { 15090 REAL_VALUE_TYPE c; 15091 HOST_WIDE_INT n; 15092 15093 c = TREE_REAL_CST (arg1); 15094 n = real_to_integer (&c); 15095 if ((n & 1) == 0) 15096 { 15097 REAL_VALUE_TYPE cint; 15098 real_from_integer (&cint, VOIDmode, n, SIGNED); 15099 if (real_identical (&c, &cint)) 15100 return true; 15101 } 15102 } 15103 return tree_expr_nonnegative_warnv_p (arg0, 15104 strict_overflow_p); 15105 15106 default: 15107 break; 15108 } 15109 return tree_simple_nonnegative_warnv_p (CALL_EXPR, 15110 type); 15111} 15112 15113/* Return true if T is known to be non-negative. If the return 15114 value is based on the assumption that signed overflow is undefined, 15115 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15116 *STRICT_OVERFLOW_P. */ 15117 15118static bool 15119tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15120{ 15121 enum tree_code code = TREE_CODE (t); 15122 if (TYPE_UNSIGNED (TREE_TYPE (t))) 15123 return true; 15124 15125 switch (code) 15126 { 15127 case TARGET_EXPR: 15128 { 15129 tree temp = TARGET_EXPR_SLOT (t); 15130 t = TARGET_EXPR_INITIAL (t); 15131 15132 /* If the initializer is non-void, then it's a normal expression 15133 that will be assigned to the slot. */ 15134 if (!VOID_TYPE_P (t)) 15135 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p); 15136 15137 /* Otherwise, the initializer sets the slot in some way. One common 15138 way is an assignment statement at the end of the initializer. */ 15139 while (1) 15140 { 15141 if (TREE_CODE (t) == BIND_EXPR) 15142 t = expr_last (BIND_EXPR_BODY (t)); 15143 else if (TREE_CODE (t) == TRY_FINALLY_EXPR 15144 || TREE_CODE (t) == TRY_CATCH_EXPR) 15145 t = expr_last (TREE_OPERAND (t, 0)); 15146 else if (TREE_CODE (t) == STATEMENT_LIST) 15147 t = expr_last (t); 15148 else 15149 break; 15150 } 15151 if (TREE_CODE (t) == MODIFY_EXPR 15152 && TREE_OPERAND (t, 0) == temp) 15153 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15154 strict_overflow_p); 15155 15156 return false; 15157 } 15158 15159 case CALL_EXPR: 15160 { 15161 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE; 15162 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE; 15163 15164 return tree_call_nonnegative_warnv_p (TREE_TYPE (t), 15165 get_callee_fndecl (t), 15166 arg0, 15167 arg1, 15168 strict_overflow_p); 15169 } 15170 case COMPOUND_EXPR: 15171 case MODIFY_EXPR: 15172 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15173 strict_overflow_p); 15174 case BIND_EXPR: 15175 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)), 15176 strict_overflow_p); 15177 case SAVE_EXPR: 15178 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 15179 strict_overflow_p); 15180 15181 default: 15182 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), 15183 TREE_TYPE (t)); 15184 } 15185 15186 /* We don't know sign of `t', so be conservative and return false. */ 15187 return false; 15188} 15189 15190/* Return true if T is known to be non-negative. 
If the return 15191 value is based on the assumption that signed overflow is undefined, 15192 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15193 *STRICT_OVERFLOW_P. */ 15194 15195bool 15196tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15197{ 15198 enum tree_code code; 15199 if (t == error_mark_node) 15200 return false; 15201 15202 code = TREE_CODE (t); 15203 switch (TREE_CODE_CLASS (code)) 15204 { 15205 case tcc_binary: 15206 case tcc_comparison: 15207 return tree_binary_nonnegative_warnv_p (TREE_CODE (t), 15208 TREE_TYPE (t), 15209 TREE_OPERAND (t, 0), 15210 TREE_OPERAND (t, 1), 15211 strict_overflow_p); 15212 15213 case tcc_unary: 15214 return tree_unary_nonnegative_warnv_p (TREE_CODE (t), 15215 TREE_TYPE (t), 15216 TREE_OPERAND (t, 0), 15217 strict_overflow_p); 15218 15219 case tcc_constant: 15220 case tcc_declaration: 15221 case tcc_reference: 15222 return tree_single_nonnegative_warnv_p (t, strict_overflow_p); 15223 15224 default: 15225 break; 15226 } 15227 15228 switch (code) 15229 { 15230 case TRUTH_AND_EXPR: 15231 case TRUTH_OR_EXPR: 15232 case TRUTH_XOR_EXPR: 15233 return tree_binary_nonnegative_warnv_p (TREE_CODE (t), 15234 TREE_TYPE (t), 15235 TREE_OPERAND (t, 0), 15236 TREE_OPERAND (t, 1), 15237 strict_overflow_p); 15238 case TRUTH_NOT_EXPR: 15239 return tree_unary_nonnegative_warnv_p (TREE_CODE (t), 15240 TREE_TYPE (t), 15241 TREE_OPERAND (t, 0), 15242 strict_overflow_p); 15243 15244 case COND_EXPR: 15245 case CONSTRUCTOR: 15246 case OBJ_TYPE_REF: 15247 case ASSERT_EXPR: 15248 case ADDR_EXPR: 15249 case WITH_SIZE_EXPR: 15250 case SSA_NAME: 15251 return tree_single_nonnegative_warnv_p (t, strict_overflow_p); 15252 15253 default: 15254 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p); 15255 } 15256} 15257 15258/* Return true if `t' is known to be non-negative. Handle warnings 15259 about undefined signed overflow. */ 15260 15261bool 15262tree_expr_nonnegative_p (tree t) 15263{ 15264 bool ret, strict_overflow_p; 15265 15266 strict_overflow_p = false; 15267 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p); 15268 if (strict_overflow_p) 15269 fold_overflow_warning (("assuming signed overflow does not occur when " 15270 "determining that expression is always " 15271 "non-negative"), 15272 WARN_STRICT_OVERFLOW_MISC); 15273 return ret; 15274} 15275 15276 15277/* Return true when (CODE OP0) is an address and is known to be nonzero. 15278 For floating point we further ensure that T is not denormal. 15279 Similar logic is present in nonzero_address in rtlanal.h. 15280 15281 If the return value is based on the assumption that signed overflow 15282 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15283 change *STRICT_OVERFLOW_P. */ 15284 15285bool 15286tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0, 15287 bool *strict_overflow_p) 15288{ 15289 switch (code) 15290 { 15291 case ABS_EXPR: 15292 return tree_expr_nonzero_warnv_p (op0, 15293 strict_overflow_p); 15294 15295 case NOP_EXPR: 15296 { 15297 tree inner_type = TREE_TYPE (op0); 15298 tree outer_type = type; 15299 15300 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type) 15301 && tree_expr_nonzero_warnv_p (op0, 15302 strict_overflow_p)); 15303 } 15304 break; 15305 15306 case NON_LVALUE_EXPR: 15307 return tree_expr_nonzero_warnv_p (op0, 15308 strict_overflow_p); 15309 15310 default: 15311 break; 15312 } 15313 15314 return false; 15315} 15316 15317/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero. 
15318 For floating point we further ensure that T is not denormal. 15319 Similar logic is present in nonzero_address in rtlanal.h. 15320 15321 If the return value is based on the assumption that signed overflow 15322 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15323 change *STRICT_OVERFLOW_P. */ 15324 15325bool 15326tree_binary_nonzero_warnv_p (enum tree_code code, 15327 tree type, 15328 tree op0, 15329 tree op1, bool *strict_overflow_p) 15330{ 15331 bool sub_strict_overflow_p; 15332 switch (code) 15333 { 15334 case POINTER_PLUS_EXPR: 15335 case PLUS_EXPR: 15336 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type)) 15337 { 15338 /* With the presence of negative values it is hard 15339 to say something. */ 15340 sub_strict_overflow_p = false; 15341 if (!tree_expr_nonnegative_warnv_p (op0, 15342 &sub_strict_overflow_p) 15343 || !tree_expr_nonnegative_warnv_p (op1, 15344 &sub_strict_overflow_p)) 15345 return false; 15346 /* One of operands must be positive and the other non-negative. */ 15347 /* We don't set *STRICT_OVERFLOW_P here: even if this value 15348 overflows, on a twos-complement machine the sum of two 15349 nonnegative numbers can never be zero. */ 15350 return (tree_expr_nonzero_warnv_p (op0, 15351 strict_overflow_p) 15352 || tree_expr_nonzero_warnv_p (op1, 15353 strict_overflow_p)); 15354 } 15355 break; 15356 15357 case MULT_EXPR: 15358 if (TYPE_OVERFLOW_UNDEFINED (type)) 15359 { 15360 if (tree_expr_nonzero_warnv_p (op0, 15361 strict_overflow_p) 15362 && tree_expr_nonzero_warnv_p (op1, 15363 strict_overflow_p)) 15364 { 15365 *strict_overflow_p = true; 15366 return true; 15367 } 15368 } 15369 break; 15370 15371 case MIN_EXPR: 15372 sub_strict_overflow_p = false; 15373 if (tree_expr_nonzero_warnv_p (op0, 15374 &sub_strict_overflow_p) 15375 && tree_expr_nonzero_warnv_p (op1, 15376 &sub_strict_overflow_p)) 15377 { 15378 if (sub_strict_overflow_p) 15379 *strict_overflow_p = true; 15380 } 15381 break; 15382 15383 case MAX_EXPR: 15384 sub_strict_overflow_p = false; 15385 if (tree_expr_nonzero_warnv_p (op0, 15386 &sub_strict_overflow_p)) 15387 { 15388 if (sub_strict_overflow_p) 15389 *strict_overflow_p = true; 15390 15391 /* When both operands are nonzero, then MAX must be too. */ 15392 if (tree_expr_nonzero_warnv_p (op1, 15393 strict_overflow_p)) 15394 return true; 15395 15396 /* MAX where operand 0 is positive is positive. */ 15397 return tree_expr_nonnegative_warnv_p (op0, 15398 strict_overflow_p); 15399 } 15400 /* MAX where operand 1 is positive is positive. */ 15401 else if (tree_expr_nonzero_warnv_p (op1, 15402 &sub_strict_overflow_p) 15403 && tree_expr_nonnegative_warnv_p (op1, 15404 &sub_strict_overflow_p)) 15405 { 15406 if (sub_strict_overflow_p) 15407 *strict_overflow_p = true; 15408 return true; 15409 } 15410 break; 15411 15412 case BIT_IOR_EXPR: 15413 return (tree_expr_nonzero_warnv_p (op1, 15414 strict_overflow_p) 15415 || tree_expr_nonzero_warnv_p (op0, 15416 strict_overflow_p)); 15417 15418 default: 15419 break; 15420 } 15421 15422 return false; 15423} 15424 15425/* Return true when T is an address and is known to be nonzero. 15426 For floating point we further ensure that T is not denormal. 15427 Similar logic is present in nonzero_address in rtlanal.h. 15428 15429 If the return value is based on the assumption that signed overflow 15430 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15431 change *STRICT_OVERFLOW_P. 
*/ 15432 15433bool 15434tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p) 15435{ 15436 bool sub_strict_overflow_p; 15437 switch (TREE_CODE (t)) 15438 { 15439 case INTEGER_CST: 15440 return !integer_zerop (t); 15441 15442 case ADDR_EXPR: 15443 { 15444 tree base = TREE_OPERAND (t, 0); 15445 15446 if (!DECL_P (base)) 15447 base = get_base_address (base); 15448 15449 if (!base) 15450 return false; 15451 15452 /* For objects in symbol table check if we know they are non-zero. 15453 Don't do anything for variables and functions before symtab is built; 15454 it is quite possible that they will be declared weak later. */ 15455 if (DECL_P (base) && decl_in_symtab_p (base)) 15456 { 15457 struct symtab_node *symbol; 15458 15459 symbol = symtab_node::get_create (base); 15460 if (symbol) 15461 return symbol->nonzero_address (); 15462 else 15463 return false; 15464 } 15465 15466 /* Function local objects are never NULL. */ 15467 if (DECL_P (base) 15468 && (DECL_CONTEXT (base) 15469 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL 15470 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))) 15471 return true; 15472 15473 /* Constants are never weak. */ 15474 if (CONSTANT_CLASS_P (base)) 15475 return true; 15476 15477 return false; 15478 } 15479 15480 case COND_EXPR: 15481 sub_strict_overflow_p = false; 15482 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 15483 &sub_strict_overflow_p) 15484 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2), 15485 &sub_strict_overflow_p)) 15486 { 15487 if (sub_strict_overflow_p) 15488 *strict_overflow_p = true; 15489 return true; 15490 } 15491 break; 15492 15493 default: 15494 break; 15495 } 15496 return false; 15497} 15498 15499/* Given the components of a binary expression CODE, TYPE, OP0 and OP1, 15500 attempt to fold the expression to a constant without modifying TYPE, 15501 OP0 or OP1. 15502 15503 If the expression could be simplified to a constant, then return 15504 the constant. If the expression would not be simplified to a 15505 constant, then return NULL_TREE. */ 15506 15507tree 15508fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1) 15509{ 15510 tree tem = fold_binary (code, type, op0, op1); 15511 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE; 15512} 15513 15514/* Given the components of a unary expression CODE, TYPE and OP0, 15515 attempt to fold the expression to a constant without modifying 15516 TYPE or OP0. 15517 15518 If the expression could be simplified to a constant, then return 15519 the constant. If the expression would not be simplified to a 15520 constant, then return NULL_TREE. */ 15521 15522tree 15523fold_unary_to_constant (enum tree_code code, tree type, tree op0) 15524{ 15525 tree tem = fold_unary (code, type, op0); 15526 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE; 15527} 15528 15529/* If EXP represents referencing an element in a constant string 15530 (either via pointer arithmetic or array indexing), return the 15531 tree representing the value accessed, otherwise return NULL. 
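For example, both "abc"[1] and *("abc" + 1) fold to the character constant 'b'.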
*/ 15532 15533 tree 15534 fold_read_from_constant_string (tree exp) 15535 { 15536 if ((TREE_CODE (exp) == INDIRECT_REF 15537 || TREE_CODE (exp) == ARRAY_REF) 15538 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE) 15539 { 15540 tree exp1 = TREE_OPERAND (exp, 0); 15541 tree index; 15542 tree string; 15543 location_t loc = EXPR_LOCATION (exp); 15544 15545 if (TREE_CODE (exp) == INDIRECT_REF) 15546 string = string_constant (exp1, &index); 15547 else 15548 { 15549 tree low_bound = array_ref_low_bound (exp); 15550 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1)); 15551 15552 /* Optimize the special case of a zero lower bound. 15553 15554 We convert the low_bound to sizetype to avoid some problems 15555 with constant folding. (E.g. suppose the lower bound is 1, 15556 and its mode is QI. Without the conversion, (ARRAY 15557 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) 15558 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ 15559 if (! integer_zerop (low_bound)) 15560 index = size_diffop_loc (loc, index, 15561 fold_convert_loc (loc, sizetype, low_bound)); 15562 15563 string = exp1; 15564 } 15565 15566 if (string 15567 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string))) 15568 && TREE_CODE (string) == STRING_CST 15569 && TREE_CODE (index) == INTEGER_CST 15570 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0 15571 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) 15572 == MODE_INT) 15573 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1)) 15574 return build_int_cst_type (TREE_TYPE (exp), 15575 (TREE_STRING_POINTER (string) 15576 [TREE_INT_CST_LOW (index)])); 15577 } 15578 return NULL; 15579 } 15580 15581 /* Return the tree for neg (ARG0) when ARG0 is known to be either 15582 an integer constant, real, or fixed-point constant. 15583 15584 TYPE is the type of the result. */ 15585 15586 static tree 15587 fold_negate_const (tree arg0, tree type) 15588 { 15589 tree t = NULL_TREE; 15590 15591 switch (TREE_CODE (arg0)) 15592 { 15593 case INTEGER_CST: 15594 { 15595 bool overflow; 15596 wide_int val = wi::neg (arg0, &overflow); 15597 t = force_fit_type (type, val, 1, 15598 (overflow | TREE_OVERFLOW (arg0)) 15599 && !TYPE_UNSIGNED (type)); 15600 break; 15601 } 15602 15603 case REAL_CST: 15604 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); 15605 break; 15606 15607 case FIXED_CST: 15608 { 15609 FIXED_VALUE_TYPE f; 15610 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR, 15611 &(TREE_FIXED_CST (arg0)), NULL, 15612 TYPE_SATURATING (type)); 15613 t = build_fixed (type, f); 15614 /* Propagate overflow flags. */ 15615 if (overflow_p | TREE_OVERFLOW (arg0)) 15616 TREE_OVERFLOW (t) = 1; 15617 break; 15618 } 15619 15620 default: 15621 gcc_unreachable (); 15622 } 15623 15624 return t; 15625 } 15626 15627 /* Return the tree for abs (ARG0) when ARG0 is known to be either 15628 an integer constant or real constant. 15629 15630 TYPE is the type of the result. */ 15631 15632 tree 15633 fold_abs_const (tree arg0, tree type) 15634 { 15635 tree t = NULL_TREE; 15636 15637 switch (TREE_CODE (arg0)) 15638 { 15639 case INTEGER_CST: 15640 { 15641 /* If the value is unsigned or non-negative, then the absolute value 15642 is the same as the ordinary value. */ 15643 if (!wi::neg_p (arg0, TYPE_SIGN (type))) 15644 t = arg0; 15645 15646 /* If the value is negative, then the absolute value is 15647 its negation.
*/ 15648 else 15649 { 15650 bool overflow; 15651 wide_int val = wi::neg (arg0, &overflow); 15652 t = force_fit_type (type, val, -1, 15653 overflow | TREE_OVERFLOW (arg0)); 15654 } 15655 } 15656 break; 15657 15658 case REAL_CST: 15659 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))) 15660 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); 15661 else 15662 t = arg0; 15663 break; 15664 15665 default: 15666 gcc_unreachable (); 15667 } 15668 15669 return t; 15670} 15671 15672/* Return the tree for not (ARG0) when ARG0 is known to be an integer 15673 constant. TYPE is the type of the result. */ 15674 15675static tree 15676fold_not_const (const_tree arg0, tree type) 15677{ 15678 gcc_assert (TREE_CODE (arg0) == INTEGER_CST); 15679 15680 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0)); 15681} 15682 15683/* Given CODE, a relational operator, the target type, TYPE and two 15684 constant operands OP0 and OP1, return the result of the 15685 relational operation. If the result is not a compile time 15686 constant, then return NULL_TREE. */ 15687 15688static tree 15689fold_relational_const (enum tree_code code, tree type, tree op0, tree op1) 15690{ 15691 int result, invert; 15692 15693 /* From here on, the only cases we handle are when the result is 15694 known to be a constant. */ 15695 15696 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST) 15697 { 15698 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0); 15699 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1); 15700 15701 /* Handle the cases where either operand is a NaN. */ 15702 if (real_isnan (c0) || real_isnan (c1)) 15703 { 15704 switch (code) 15705 { 15706 case EQ_EXPR: 15707 case ORDERED_EXPR: 15708 result = 0; 15709 break; 15710 15711 case NE_EXPR: 15712 case UNORDERED_EXPR: 15713 case UNLT_EXPR: 15714 case UNLE_EXPR: 15715 case UNGT_EXPR: 15716 case UNGE_EXPR: 15717 case UNEQ_EXPR: 15718 result = 1; 15719 break; 15720 15721 case LT_EXPR: 15722 case LE_EXPR: 15723 case GT_EXPR: 15724 case GE_EXPR: 15725 case LTGT_EXPR: 15726 if (flag_trapping_math) 15727 return NULL_TREE; 15728 result = 0; 15729 break; 15730 15731 default: 15732 gcc_unreachable (); 15733 } 15734 15735 return constant_boolean_node (result, type); 15736 } 15737 15738 return constant_boolean_node (real_compare (code, c0, c1), type); 15739 } 15740 15741 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST) 15742 { 15743 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0); 15744 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1); 15745 return constant_boolean_node (fixed_compare (code, c0, c1), type); 15746 } 15747 15748 /* Handle equality/inequality of complex constants. 
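Two complex constants are equal iff their real parts and their imaginary parts are equal; only EQ_EXPR and NE_EXPR can be decided here, any other comparison yields NULL_TREE.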
*/ 15749 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST) 15750 { 15751 tree rcond = fold_relational_const (code, type, 15752 TREE_REALPART (op0), 15753 TREE_REALPART (op1)); 15754 tree icond = fold_relational_const (code, type, 15755 TREE_IMAGPART (op0), 15756 TREE_IMAGPART (op1)); 15757 if (code == EQ_EXPR) 15758 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond); 15759 else if (code == NE_EXPR) 15760 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond); 15761 else 15762 return NULL_TREE; 15763 } 15764 15765 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST) 15766 { 15767 unsigned count = VECTOR_CST_NELTS (op0); 15768 tree *elts = XALLOCAVEC (tree, count); 15769 gcc_assert (VECTOR_CST_NELTS (op1) == count 15770 && TYPE_VECTOR_SUBPARTS (type) == count); 15771 15772 for (unsigned i = 0; i < count; i++) 15773 { 15774 tree elem_type = TREE_TYPE (type); 15775 tree elem0 = VECTOR_CST_ELT (op0, i); 15776 tree elem1 = VECTOR_CST_ELT (op1, i); 15777 15778 tree tem = fold_relational_const (code, elem_type, 15779 elem0, elem1); 15780 15781 if (tem == NULL_TREE) 15782 return NULL_TREE; 15783 15784 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1); 15785 } 15786 15787 return build_vector (type, elts); 15788 } 15789 15790 /* From here on we only handle LT, LE, GT, GE, EQ and NE. 15791 15792 To compute GT, swap the arguments and do LT. 15793 To compute GE, do LT and invert the result. 15794 To compute LE, swap the arguments, do LT and invert the result. 15795 To compute NE, do EQ and invert the result. 15796 15797 Therefore, the code below must handle only EQ and LT. */ 15798 15799 if (code == LE_EXPR || code == GT_EXPR) 15800 { 15801 tree tem = op0; 15802 op0 = op1; 15803 op1 = tem; 15804 code = swap_tree_comparison (code); 15805 } 15806 15807 /* Note that it is safe to invert for real values here because we 15808 have already handled the one case that it matters. */ 15809 15810 invert = 0; 15811 if (code == NE_EXPR || code == GE_EXPR) 15812 { 15813 invert = 1; 15814 code = invert_tree_comparison (code, false); 15815 } 15816 15817 /* Compute a result for LT or EQ if args permit; 15818 Otherwise return T. */ 15819 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST) 15820 { 15821 if (code == EQ_EXPR) 15822 result = tree_int_cst_equal (op0, op1); 15823 else 15824 result = tree_int_cst_lt (op0, op1); 15825 } 15826 else 15827 return NULL_TREE; 15828 15829 if (invert) 15830 result ^= 1; 15831 return constant_boolean_node (result, type); 15832} 15833 15834/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the 15835 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR 15836 itself. */ 15837 15838tree 15839fold_build_cleanup_point_expr (tree type, tree expr) 15840{ 15841 /* If the expression does not have side effects then we don't have to wrap 15842 it with a cleanup point expression. */ 15843 if (!TREE_SIDE_EFFECTS (expr)) 15844 return expr; 15845 15846 /* If the expression is a return, check to see if the expression inside the 15847 return has no side effects or the right hand side of the modify expression 15848 inside the return. If either don't have side effects set we don't need to 15849 wrap the expression in a cleanup point expression. Note we don't check the 15850 left hand side of the modify because it should always be a return decl. 

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to
     wrap it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return has side effects, and likewise for the right hand side of
     the modify expression inside the return.  If either has none, we do
     not need to wrap the expression in a cleanup point expression.  Note
     we don't check the left hand side of the modify because it should
     always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the CONST_DECL.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
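
/* For illustration (source-level view of hypothetical input trees, not
   calls made from this file): with 'double d[4]', fold_indirect_ref_1
   turns the tree for *(double *)&d into d[0] and the tree for
   *(double *)((char *)&d + 8) into d[1].  When no pattern matches it
   returns NULL_TREE, and callers such as build_fold_indirect_ref_loc
   below fall back to a plain INDIRECT_REF.  */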

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
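
/* For example (illustrative only): for an ignored '(f (), 0) + z' where
   only the call 'f ()' has side effects, fold_ignored_result peels off
   the PLUS_EXPR and the COMPOUND_EXPR and returns just the call; an
   expression with no side effects at all collapses to
   integer_zero_node.  */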

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a constant,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val += divisor - 1;
          val &= - (int) divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), - (int) divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a constant,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
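
/* Worked example (illustrative): with a power-of-two divisor the rounding
   is pure bit manipulation, e.g. round_up_loc (loc, size_int (10), 8)
   folds to 16 via (10 + 7) & -8, and round_down_loc (loc, size_int (10), 8)
   folds to 8 via 10 & -8.  A divisor such as 12 instead goes through
   CEIL_DIV_EXPR or FLOOR_DIV_EXPR followed by MULT_EXPR.  */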

/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
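
/* For instance (hypothetical input trees): given 'int a[10]', the
   addresses &a[7] and &a[2] share the core '&a' and differ only by
   constant offsets, so ptr_difference_const stores 5 * sizeof (int),
   i.e. 20, in *DIFF and returns true; if either index were a variable,
   it would return false instead.  */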

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}
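
/* Usage sketch for the helpers above (assumed caller-side code, for
   illustration only):

     tree p1 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
     tree p2 = fold_build_pointer_plus_loc (loc, ptr, off);

   Both build a POINTER_PLUS_EXPR with the type of PTR; the second form
   first converts OFF to sizetype via convert_to_ptrofftype_loc, since
   POINTER_PLUS_EXPR expects its offset operand in sizetype.  */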