expr.c revision 259563
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif
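
/* In each of these heuristics the estimated number of piecewise insns
   is compared against a target-defined ratio.  For illustration only:
   with a ratio of 3, a copy or clear that fits in one or two pieces
   is expanded inline, and anything larger falls through to a block
   pattern or a library call.  */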
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
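
/* Usage sketch for convert_move below (illustrative, not code from
   this file): to widen a QImode value QI_VAL into a fresh SImode
   register with sign extension, a caller would write

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, qi_val, 0);

   where the final 0 (UNSIGNEDP) selects sign extension; a nonzero
   value requests zero extension instead.  */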
*/ 375 376 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from) 377 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from))) 378 >= GET_MODE_SIZE (to_mode)) 379 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp) 380 from = gen_lowpart (to_mode, from), from_mode = to_mode; 381 382 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to)); 383 384 if (to_mode == from_mode 385 || (from_mode == VOIDmode && CONSTANT_P (from))) 386 { 387 emit_move_insn (to, from); 388 return; 389 } 390 391 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode)) 392 { 393 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode)); 394 395 if (VECTOR_MODE_P (to_mode)) 396 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0); 397 else 398 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0); 399 400 emit_move_insn (to, from); 401 return; 402 } 403 404 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT) 405 { 406 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp); 407 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp); 408 return; 409 } 410 411 if (to_real) 412 { 413 rtx value, insns; 414 convert_optab tab; 415 416 gcc_assert ((GET_MODE_PRECISION (from_mode) 417 != GET_MODE_PRECISION (to_mode)) 418 || (DECIMAL_FLOAT_MODE_P (from_mode) 419 != DECIMAL_FLOAT_MODE_P (to_mode))); 420 421 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode)) 422 /* Conversion between decimal float and binary float, same size. */ 423 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab; 424 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)) 425 tab = sext_optab; 426 else 427 tab = trunc_optab; 428 429 /* Try converting directly if the insn is supported. */ 430 431 code = tab->handlers[to_mode][from_mode].insn_code; 432 if (code != CODE_FOR_nothing) 433 { 434 emit_unop_insn (code, to, from, 435 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE); 436 return; 437 } 438 439 /* Otherwise use a libcall. */ 440 libcall = tab->handlers[to_mode][from_mode].libfunc; 441 442 /* Is this conversion implemented yet? */ 443 gcc_assert (libcall); 444 445 start_sequence (); 446 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode, 447 1, from, from_mode); 448 insns = get_insns (); 449 end_sequence (); 450 emit_libcall_block (insns, to, value, 451 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode, 452 from) 453 : gen_rtx_FLOAT_EXTEND (to_mode, from)); 454 return; 455 } 456 457 /* Handle pointer conversion. */ /* SPEE 900220. */ 458 /* Targets are expected to provide conversion insns between PxImode and 459 xImode for all MODE_PARTIAL_INT modes they use, but no others. 
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
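      /* For unsigned input that value is simply zero; for signed input
         it is a word full of copies of the sign bit, produced below
         either by a store-flag (slt) insn or by an arithmetic right
         shift of the low part.  */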
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
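          /* Shifting the value to the top of TO_MODE and back down by
             the same amount performs the extension: expand_shift emits
             a logical right shift when UNSIGNEDP is nonzero (zero
             extension) and an arithmetic one otherwise (sign
             extension).  */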
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
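          /* The mask ((HOST_WIDE_INT) 1 << width) - 1 keeps only the
             low WIDTH bits; e.g. a QImode -1 becomes 0xff rather than
             an all-ones wide integer.  */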
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into mode is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;
  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
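  /* For instance (illustrative numbers only): with 4-byte pieces
     available, an 11-byte copy comes out of the loop below as two
     SImode moves, one HImode move and one QImode move.  */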
  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);
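
  /* From here the copy is dispatched in order of preference: an
     inlined by-pieces copy for small constant sizes, a target movmem
     pattern, a call to memcpy when calls are permitted, and finally
     an explicit byte loop when they are not.  */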
  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
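
/* Because the call built above is a genuine call to memcpy, its
   return value is DST's address, which emit_block_move propagates to
   its own caller.  */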
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
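
/* The loop emitted above branches straight to its bottom-of-loop
   comparison, so a zero-length copy performs no byte moves at all.  */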
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
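
/* For reference, the register groups handled here are PARALLELs of
   (expr_list (reg) (const_int byte-offset)) entries.  E.g. a 16-byte
   struct returned in two DImode registers might be represented as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   with the register numbers invented for the example.  */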
*/ 1833 1834rtx 1835emit_group_move_into_temps (rtx src) 1836{ 1837 rtvec vec = rtvec_alloc (XVECLEN (src, 0)); 1838 int i; 1839 1840 for (i = 0; i < XVECLEN (src, 0); i++) 1841 { 1842 rtx e = XVECEXP (src, 0, i); 1843 rtx d = XEXP (e, 0); 1844 1845 if (d) 1846 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1)); 1847 RTVEC_ELT (vec, i) = e; 1848 } 1849 1850 return gen_rtx_PARALLEL (GET_MODE (src), vec); 1851} 1852 1853/* Emit code to move a block SRC to a block ORIG_DST of type TYPE, 1854 where SRC is non-consecutive registers represented by a PARALLEL. 1855 SSIZE represents the total size of block ORIG_DST, or -1 if not 1856 known. */ 1857 1858void 1859emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) 1860{ 1861 rtx *tmps, dst; 1862 int start, finish, i; 1863 enum machine_mode m = GET_MODE (orig_dst); 1864 1865 gcc_assert (GET_CODE (src) == PARALLEL); 1866 1867 if (!SCALAR_INT_MODE_P (m) 1868 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT) 1869 { 1870 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst)); 1871 if (imode == BLKmode) 1872 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0); 1873 else 1874 dst = gen_reg_rtx (imode); 1875 emit_group_store (dst, src, type, ssize); 1876 if (imode != BLKmode) 1877 dst = gen_lowpart (GET_MODE (orig_dst), dst); 1878 emit_move_insn (orig_dst, dst); 1879 return; 1880 } 1881 1882 /* Check for a NULL entry, used to indicate that the parameter goes 1883 both on the stack and in registers. */ 1884 if (XEXP (XVECEXP (src, 0, 0), 0)) 1885 start = 0; 1886 else 1887 start = 1; 1888 finish = XVECLEN (src, 0); 1889 1890 tmps = alloca (sizeof (rtx) * finish); 1891 1892 /* Copy the (probable) hard regs into pseudos. */ 1893 for (i = start; i < finish; i++) 1894 { 1895 rtx reg = XEXP (XVECEXP (src, 0, i), 0); 1896 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER) 1897 { 1898 tmps[i] = gen_reg_rtx (GET_MODE (reg)); 1899 emit_move_insn (tmps[i], reg); 1900 } 1901 else 1902 tmps[i] = reg; 1903 } 1904 1905 /* If we won't be storing directly into memory, protect the real destination 1906 from strange tricks we might play. */ 1907 dst = orig_dst; 1908 if (GET_CODE (dst) == PARALLEL) 1909 { 1910 rtx temp; 1911 1912 /* We can get a PARALLEL dst if there is a conditional expression in 1913 a return statement. In that case, the dst and src are the same, 1914 so no action is necessary. */ 1915 if (rtx_equal_p (dst, src)) 1916 return; 1917 1918 /* It is unclear if we can ever reach here, but we may as well handle 1919 it. Allocate a temporary, and split this into a store/load to/from 1920 the temporary. */ 1921 1922 temp = assign_stack_temp (GET_MODE (dst), ssize, 0); 1923 emit_group_store (temp, src, type, ssize); 1924 emit_group_load (dst, temp, type, ssize); 1925 return; 1926 } 1927 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT) 1928 { 1929 enum machine_mode outer = GET_MODE (dst); 1930 enum machine_mode inner; 1931 HOST_WIDE_INT bytepos; 1932 bool done = false; 1933 rtx temp; 1934 1935 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER) 1936 dst = gen_reg_rtx (outer); 1937 1938 /* Make life a bit easier for combine. */ 1939 /* If the first element of the vector is the low part 1940 of the destination mode, use a paradoxical subreg to 1941 initialize the destination. 
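
   As a concrete illustration (modes and endianness chosen only for
   the example): if DST is a DImode pseudo and TMPS[START] holds the
   SImode piece at byte offset 0 on a little-endian target, a
   paradoxical (subreg:DI (reg:SI tmp) 0) initializes the whole
   destination with a single move, and the remaining pieces are then
   inserted by the store_bit_field calls below.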
*/ 1942 if (start < finish) 1943 { 1944 inner = GET_MODE (tmps[start]); 1945 bytepos = subreg_lowpart_offset (inner, outer); 1946 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos) 1947 { 1948 temp = simplify_gen_subreg (outer, tmps[start], 1949 inner, 0); 1950 if (temp) 1951 { 1952 emit_move_insn (dst, temp); 1953 done = true; 1954 start++; 1955 } 1956 } 1957 } 1958 1959 /* If the first element wasn't the low part, try the last. */ 1960 if (!done 1961 && start < finish - 1) 1962 { 1963 inner = GET_MODE (tmps[finish - 1]); 1964 bytepos = subreg_lowpart_offset (inner, outer); 1965 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos) 1966 { 1967 temp = simplify_gen_subreg (outer, tmps[finish - 1], 1968 inner, 0); 1969 if (temp) 1970 { 1971 emit_move_insn (dst, temp); 1972 done = true; 1973 finish--; 1974 } 1975 } 1976 } 1977 1978 /* Otherwise, simply initialize the result to zero. */ 1979 if (!done) 1980 emit_move_insn (dst, CONST0_RTX (outer)); 1981 } 1982 1983 /* Process the pieces. */ 1984 for (i = start; i < finish; i++) 1985 { 1986 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); 1987 enum machine_mode mode = GET_MODE (tmps[i]); 1988 unsigned int bytelen = GET_MODE_SIZE (mode); 1989 rtx dest = dst; 1990 1991 /* Handle trailing fragments that run over the size of the struct. */ 1992 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) 1993 { 1994 /* store_bit_field always takes its value from the lsb. 1995 Move the fragment to the lsb if it's not already there. */ 1996 if ( 1997#ifdef BLOCK_REG_PADDING 1998 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start) 1999 == (BYTES_BIG_ENDIAN ? upward : downward) 2000#else 2001 BYTES_BIG_ENDIAN 2002#endif 2003 ) 2004 { 2005 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; 2006 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i], 2007 build_int_cst (NULL_TREE, shift), 2008 tmps[i], 0); 2009 } 2010 bytelen = ssize - bytepos; 2011 } 2012 2013 if (GET_CODE (dst) == CONCAT) 2014 { 2015 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) 2016 dest = XEXP (dst, 0); 2017 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) 2018 { 2019 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); 2020 dest = XEXP (dst, 1); 2021 } 2022 else 2023 { 2024 gcc_assert (bytepos == 0 && XVECLEN (src, 0)); 2025 dest = assign_stack_temp (GET_MODE (dest), 2026 GET_MODE_SIZE (GET_MODE (dest)), 0); 2027 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos), 2028 tmps[i]); 2029 dst = dest; 2030 break; 2031 } 2032 } 2033 2034 /* Optimize the access just a bit. */ 2035 if (MEM_P (dest) 2036 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest)) 2037 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)) 2038 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 2039 && bytelen == GET_MODE_SIZE (mode)) 2040 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]); 2041 else 2042 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, 2043 mode, tmps[i]); 2044 } 2045 2046 /* Copy from the pseudo into the (probable) hard reg. */ 2047 if (orig_dst != dst) 2048 emit_move_insn (orig_dst, dst); 2049} 2050 2051/* Generate code to copy a BLKmode object of TYPE out of a 2052 set of registers starting with SRCREG into TGTBLK. If TGTBLK 2053 is null, a stack temporary is created. TGTBLK is returned. 2054 2055 The purpose of this routine is to handle functions that return 2056 BLKmode structures in registers. 
Some machines (the PA for example) 2057 want to return all small structures in registers regardless of the 2058 structure's alignment. */ 2059 2060rtx 2061copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type) 2062{ 2063 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); 2064 rtx src = NULL, dst = NULL; 2065 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); 2066 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0; 2067 2068 if (tgtblk == 0) 2069 { 2070 tgtblk = assign_temp (build_qualified_type (type, 2071 (TYPE_QUALS (type) 2072 | TYPE_QUAL_CONST)), 2073 0, 1, 1); 2074 preserve_temp_slots (tgtblk); 2075 } 2076 2077 /* This code assumes srcreg is at least a full word. If it isn't, copy it 2078 into a new pseudo which is a full word. */ 2079 2080 if (GET_MODE (srcreg) != BLKmode 2081 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) 2082 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type)); 2083 2084 /* If the structure doesn't take up a whole number of words, see whether 2085 SRCREG is padded on the left or on the right. If it's on the left, 2086 set PADDING_CORRECTION to the number of bits to skip. 2087 2088 In most ABIs, the structure will be returned at the least significant 2089 end of the register, which translates to right padding on little-endian 2090 targets and left padding on big-endian targets. The opposite 2091 holds if the structure is returned at the most significant 2092 end of the register. */ 2093 if (bytes % UNITS_PER_WORD != 0 2094 && (targetm.calls.return_in_msb (type) 2095 ? !BYTES_BIG_ENDIAN 2096 : BYTES_BIG_ENDIAN)) 2097 padding_correction 2098 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT)); 2099 2100 /* Copy the structure BITSIZE bits at a time. 2101 2102 We could probably emit more efficient code for machines which do not use 2103 strict alignment, but it doesn't seem worth the effort at the current 2104 time. */ 2105 for (bitpos = 0, xbitpos = padding_correction; 2106 bitpos < bytes * BITS_PER_UNIT; 2107 bitpos += bitsize, xbitpos += bitsize) 2108 { 2109 /* We need a new source operand each time xbitpos is on a 2110 word boundary and when xbitpos == padding_correction 2111 (the first time through). */ 2112 if (xbitpos % BITS_PER_WORD == 0 2113 || xbitpos == padding_correction) 2114 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, 2115 GET_MODE (srcreg)); 2116 2117 /* We need a new destination operand each time bitpos is on 2118 a word boundary. */ 2119 if (bitpos % BITS_PER_WORD == 0) 2120 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); 2121 2122 /* Use xbitpos for the source extraction (right justified) and 2123 bitpos for the destination store (left justified). */ 2124 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, 2125 extract_bit_field (src, bitsize, 2126 xbitpos % BITS_PER_WORD, 1, 2127 NULL_RTX, word_mode, word_mode)); 2128 } 2129 2130 return tgtblk; 2131} 2132 2133/* Add a USE expression for REG to the (possibly empty) list pointed 2134 to by CALL_FUSAGE. REG must denote a hard register. */ 2135 2136void 2137use_reg (rtx *call_fusage, rtx reg) 2138{ 2139 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER); 2140 2141 *call_fusage 2142 = gen_rtx_EXPR_LIST (VOIDmode, 2143 gen_rtx_USE (VOIDmode, reg), *call_fusage); 2144} 2145 2146/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs, 2147 starting at REGNO. All of these registers must be hard registers.
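
   For example (a hypothetical sketch; the register number is purely
   illustrative), a port passing an argument in three consecutive hard
   registers starting at hard register 4 could record that for the
   call with

     use_regs (&call_fusage, 4, 3);

   which is equivalent to calling use_reg on regno_reg_rtx[4],
   regno_reg_rtx[5] and regno_reg_rtx[6] in turn.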
*/ 2148 2149void 2150use_regs (rtx *call_fusage, int regno, int nregs) 2151{ 2152 int i; 2153 2154 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER); 2155 2156 for (i = 0; i < nregs; i++) 2157 use_reg (call_fusage, regno_reg_rtx[regno + i]); 2158} 2159 2160/* Add USE expressions to *CALL_FUSAGE for each REG contained in the 2161 PARALLEL REGS. This is for calls that pass values in multiple 2162 non-contiguous locations. The Irix 6 ABI has examples of this. */ 2163 2164void 2165use_group_regs (rtx *call_fusage, rtx regs) 2166{ 2167 int i; 2168 2169 for (i = 0; i < XVECLEN (regs, 0); i++) 2170 { 2171 rtx reg = XEXP (XVECEXP (regs, 0, i), 0); 2172 2173 /* A NULL entry means the parameter goes both on the stack and in 2174 registers. This can also be a MEM for targets that pass values 2175 partially on the stack and partially in registers. */ 2176 if (reg != 0 && REG_P (reg)) 2177 use_reg (call_fusage, reg); 2178 } 2179} 2180 2181 2182/* Determine whether the LEN bytes generated by CONSTFUN can be 2183 stored to memory using several move instructions. CONSTFUNDATA is 2184 a pointer which will be passed as argument in every CONSTFUN call. 2185 ALIGN is maximum alignment we can assume. Return nonzero if a 2186 call to store_by_pieces should succeed. */ 2187 2188int 2189can_store_by_pieces (unsigned HOST_WIDE_INT len, 2190 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), 2191 void *constfundata, unsigned int align) 2192{ 2193 unsigned HOST_WIDE_INT l; 2194 unsigned int max_size; 2195 HOST_WIDE_INT offset = 0; 2196 enum machine_mode mode, tmode; 2197 enum insn_code icode; 2198 int reverse; 2199 rtx cst; 2200 2201 if (len == 0) 2202 return 1; 2203 2204 if (! STORE_BY_PIECES_P (len, align)) 2205 return 0; 2206 2207 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); 2208 if (align >= GET_MODE_ALIGNMENT (tmode)) 2209 align = GET_MODE_ALIGNMENT (tmode); 2210 else 2211 { 2212 enum machine_mode xmode; 2213 2214 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; 2215 tmode != VOIDmode; 2216 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) 2217 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES 2218 || SLOW_UNALIGNED_ACCESS (tmode, align)) 2219 break; 2220 2221 align = MAX (align, GET_MODE_ALIGNMENT (xmode)); 2222 } 2223 2224 /* We would first store what we can in the largest integer mode, then go to 2225 successively smaller modes. */ 2226 2227 for (reverse = 0; 2228 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); 2229 reverse++) 2230 { 2231 l = len; 2232 mode = VOIDmode; 2233 max_size = STORE_MAX_PIECES + 1; 2234 while (max_size > 1) 2235 { 2236 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); 2237 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) 2238 if (GET_MODE_SIZE (tmode) < max_size) 2239 mode = tmode; 2240 2241 if (mode == VOIDmode) 2242 break; 2243 2244 icode = mov_optab->handlers[(int) mode].insn_code; 2245 if (icode != CODE_FOR_nothing 2246 && align >= GET_MODE_ALIGNMENT (mode)) 2247 { 2248 unsigned int size = GET_MODE_SIZE (mode); 2249 2250 while (l >= size) 2251 { 2252 if (reverse) 2253 offset -= size; 2254 2255 cst = (*constfun) (constfundata, offset, mode); 2256 if (!LEGITIMATE_CONSTANT_P (cst)) 2257 return 0; 2258 2259 if (!reverse) 2260 offset += size; 2261 2262 l -= size; 2263 } 2264 } 2265 2266 max_size = GET_MODE_SIZE (mode); 2267 } 2268 2269 /* The code above should have handled everything. 
*/ 2270 gcc_assert (!l); 2271 } 2272 2273 return 1; 2274} 2275 2276/* Generate several move instructions to store LEN bytes generated by 2277 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a 2278 pointer which will be passed as argument in every CONSTFUN call. 2279 ALIGN is maximum alignment we can assume. 2280 If ENDP is 0 return TO; if ENDP is 1 return the memory at the end, a la 2281 mempcpy; and if ENDP is 2 return the memory at the end minus one byte, a la 2282 stpcpy. */ 2283 2284rtx 2285store_by_pieces (rtx to, unsigned HOST_WIDE_INT len, 2286 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), 2287 void *constfundata, unsigned int align, int endp) 2288{ 2289 struct store_by_pieces data; 2290 2291 if (len == 0) 2292 { 2293 gcc_assert (endp != 2); 2294 return to; 2295 } 2296 2297 gcc_assert (STORE_BY_PIECES_P (len, align)); 2298 data.constfun = constfun; 2299 data.constfundata = constfundata; 2300 data.len = len; 2301 data.to = to; 2302 store_by_pieces_1 (&data, align); 2303 if (endp) 2304 { 2305 rtx to1; 2306 2307 gcc_assert (!data.reverse); 2308 if (data.autinc_to) 2309 { 2310 if (endp == 2) 2311 { 2312 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0) 2313 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx)); 2314 else 2315 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr, 2316 -1)); 2317 } 2318 to1 = adjust_automodify_address (data.to, QImode, data.to_addr, 2319 data.offset); 2320 } 2321 else 2322 { 2323 if (endp == 2) 2324 --data.offset; 2325 to1 = adjust_address (data.to, QImode, data.offset); 2326 } 2327 return to1; 2328 } 2329 else 2330 return data.to; 2331} 2332 2333/* Generate several move instructions to clear LEN bytes of block TO (a MEM 2334 rtx with BLKmode). ALIGN is maximum alignment we can assume. */ 2335 2336static void 2337clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align) 2338{ 2339 struct store_by_pieces data; 2340 2341 if (len == 0) 2342 return; 2343 2344 data.constfun = clear_by_pieces_1; 2345 data.constfundata = NULL; 2346 data.len = len; 2347 data.to = to; 2348 store_by_pieces_1 (&data, align); 2349} 2350 2351/* Callback routine for clear_by_pieces. 2352 Return const0_rtx unconditionally. */ 2353 2354static rtx 2355clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED, 2356 HOST_WIDE_INT offset ATTRIBUTE_UNUSED, 2357 enum machine_mode mode ATTRIBUTE_UNUSED) 2358{ 2359 return const0_rtx; 2360} 2361 2362/* Subroutine of clear_by_pieces and store_by_pieces. 2363 Generate several move instructions to store LEN bytes of block TO (a MEM 2364 rtx with BLKmode). ALIGN is maximum alignment we can assume. */ 2365 2366static void 2367store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED, 2368 unsigned int align ATTRIBUTE_UNUSED) 2369{ 2370 rtx to_addr = XEXP (data->to, 0); 2371 unsigned int max_size = STORE_MAX_PIECES + 1; 2372 enum machine_mode mode = VOIDmode, tmode; 2373 enum insn_code icode; 2374 2375 data->offset = 0; 2376 data->to_addr = to_addr; 2377 data->autinc_to 2378 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC 2379 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); 2380 2381 data->explicit_inc_to = 0; 2382 data->reverse 2383 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); 2384 if (data->reverse) 2385 data->offset = data->len; 2386 2387 /* If storing requires more than two move insns, 2388 copy addresses to registers (to make displacements shorter) 2389 and use post-increment if available.
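
   (Conceptually, on a target with post-increment addressing the
   resulting sequence stores through a single pointer register that
   advances after each store, e.g.

     st c0, [reg]+
     st c1, [reg]+

   rather than using ever larger displacements from a fixed base; the
   mnemonics above are only a sketch, not the syntax of any particular
   target.)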
*/ 2390 if (!data->autinc_to 2391 && move_by_pieces_ninsns (data->len, align, max_size) > 2) 2392 { 2393 /* Determine the main mode we'll be using. */ 2394 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); 2395 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) 2396 if (GET_MODE_SIZE (tmode) < max_size) 2397 mode = tmode; 2398 2399 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) 2400 { 2401 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); 2402 data->autinc_to = 1; 2403 data->explicit_inc_to = -1; 2404 } 2405 2406 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse 2407 && ! data->autinc_to) 2408 { 2409 data->to_addr = copy_addr_to_reg (to_addr); 2410 data->autinc_to = 1; 2411 data->explicit_inc_to = 1; 2412 } 2413 2414 if ( !data->autinc_to && CONSTANT_P (to_addr)) 2415 data->to_addr = copy_addr_to_reg (to_addr); 2416 } 2417 2418 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); 2419 if (align >= GET_MODE_ALIGNMENT (tmode)) 2420 align = GET_MODE_ALIGNMENT (tmode); 2421 else 2422 { 2423 enum machine_mode xmode; 2424 2425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; 2426 tmode != VOIDmode; 2427 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) 2428 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES 2429 || SLOW_UNALIGNED_ACCESS (tmode, align)) 2430 break; 2431 2432 align = MAX (align, GET_MODE_ALIGNMENT (xmode)); 2433 } 2434 2435 /* First store what we can in the largest integer mode, then go to 2436 successively smaller modes. */ 2437 2438 while (max_size > 1) 2439 { 2440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); 2441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) 2442 if (GET_MODE_SIZE (tmode) < max_size) 2443 mode = tmode; 2444 2445 if (mode == VOIDmode) 2446 break; 2447 2448 icode = mov_optab->handlers[(int) mode].insn_code; 2449 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) 2450 store_by_pieces_2 (GEN_FCN (icode), mode, data); 2451 2452 max_size = GET_MODE_SIZE (mode); 2453 } 2454 2455 /* The code above should have handled everything. */ 2456 gcc_assert (!data->len); 2457} 2458 2459/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate 2460 with move instructions for mode MODE. GENFUN is the gen_... function 2461 to make a move insn for that mode. DATA has all the other info. */ 2462 2463static void 2464store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode, 2465 struct store_by_pieces *data) 2466{ 2467 unsigned int size = GET_MODE_SIZE (mode); 2468 rtx to1, cst; 2469 2470 while (data->len >= size) 2471 { 2472 if (data->reverse) 2473 data->offset -= size; 2474 2475 if (data->autinc_to) 2476 to1 = adjust_automodify_address (data->to, mode, data->to_addr, 2477 data->offset); 2478 else 2479 to1 = adjust_address (data->to, mode, data->offset); 2480 2481 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) 2482 emit_insn (gen_add2_insn (data->to_addr, 2483 GEN_INT (-(HOST_WIDE_INT) size))); 2484 2485 cst = (*data->constfun) (data->constfundata, data->offset, mode); 2486 emit_insn ((*genfun) (to1, cst)); 2487 2488 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) 2489 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); 2490 2491 if (! data->reverse) 2492 data->offset += size; 2493 2494 data->len -= size; 2495 } 2496} 2497 2498/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is 2499 its length in bytes. 
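
   For example (a hypothetical sketch): to zero a 64-byte BLKmode
   temporary TEMP, a caller could write

     clear_storage (temp, GEN_INT (64), BLOCK_OP_NORMAL);

   and the clearing is then done with a plain move of zero, by pieces,
   through a setmem pattern, or by a libcall to memset, whichever case
   applies below.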
*/ 2500 2501rtx 2502clear_storage (rtx object, rtx size, enum block_op_methods method) 2503{ 2504 enum machine_mode mode = GET_MODE (object); 2505 unsigned int align; 2506 2507 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL); 2508 2509 /* If OBJECT is not BLKmode and SIZE is the same size as its mode, 2510 just move a zero. Otherwise, do this a piece at a time. */ 2511 if (mode != BLKmode 2512 && GET_CODE (size) == CONST_INT 2513 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode)) 2514 { 2515 rtx zero = CONST0_RTX (mode); 2516 if (zero != NULL) 2517 { 2518 emit_move_insn (object, zero); 2519 return NULL; 2520 } 2521 2522 if (COMPLEX_MODE_P (mode)) 2523 { 2524 zero = CONST0_RTX (GET_MODE_INNER (mode)); 2525 if (zero != NULL) 2526 { 2527 write_complex_part (object, zero, 0); 2528 write_complex_part (object, zero, 1); 2529 return NULL; 2530 } 2531 } 2532 } 2533 2534 if (size == const0_rtx) 2535 return NULL; 2536 2537 align = MEM_ALIGN (object); 2538 2539 if (GET_CODE (size) == CONST_INT 2540 && CLEAR_BY_PIECES_P (INTVAL (size), align)) 2541 clear_by_pieces (object, INTVAL (size), align); 2542 else if (set_storage_via_setmem (object, size, const0_rtx, align)) 2543 ; 2544 else 2545 return clear_storage_via_libcall (object, size, 2546 method == BLOCK_OP_TAILCALL); 2547 2548 return NULL; 2549} 2550 2551/* A subroutine of clear_storage. Expand a call to memset. 2552 Return the return value of memset, 0 otherwise. */ 2553 2554static rtx 2555clear_storage_via_libcall (rtx object, rtx size, bool tailcall) 2556{ 2557 tree call_expr, arg_list, fn, object_tree, size_tree; 2558 enum machine_mode size_mode; 2559 rtx retval; 2560 2561 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then 2562 place those pseudos into a VAR_DECL and use them later. */ 2563 2564 object = copy_to_mode_reg (Pmode, XEXP (object, 0)); 2565 2566 size_mode = TYPE_MODE (sizetype); 2567 size = convert_to_mode (size_mode, size, 1); 2568 size = copy_to_mode_reg (size_mode, size); 2569 2570 /* It is incorrect to use the libcall calling conventions to call 2571 memset in this context. This could be a user call to memset and 2572 the user may wish to examine the return value from memset. For 2573 targets where libcalls and normal calls have different conventions 2574 for returning pointers, we could end up generating incorrect code. */ 2575 2576 object_tree = make_tree (ptr_type_node, object); 2577 size_tree = make_tree (sizetype, size); 2578 2579 fn = clear_storage_libcall_fn (true); 2580 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE); 2581 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list); 2582 arg_list = tree_cons (NULL_TREE, object_tree, arg_list); 2583 2584 /* Now we have to build up the CALL_EXPR itself. */ 2585 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); 2586 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), 2587 call_expr, arg_list, NULL_TREE); 2588 CALL_EXPR_TAILCALL (call_expr) = tailcall; 2589 2590 retval = expand_normal (call_expr); 2591 2592 return retval; 2593} 2594 2595/* A subroutine of clear_storage_via_libcall. Create the tree node 2596 for the function we use for block clears. The first time FOR_CALL 2597 is true, we call assemble_external.
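
   Note that the decl built by init_block_clear_fn is for plain
   "memset"; a port or front end may later rename the assembler
   symbol, e.g. (hypothetical symbol name)

     init_block_clear_fn ("__gcc_memset");

   after which block clears expanded through this path reference that
   symbol instead.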
*/ 2598 2599static GTY(()) tree block_clear_fn; 2600 2601void 2602init_block_clear_fn (const char *asmspec) 2603{ 2604 if (!block_clear_fn) 2605 { 2606 tree fn, args; 2607 2608 fn = get_identifier ("memset"); 2609 args = build_function_type_list (ptr_type_node, ptr_type_node, 2610 integer_type_node, sizetype, 2611 NULL_TREE); 2612 2613 fn = build_decl (FUNCTION_DECL, fn, args); 2614 DECL_EXTERNAL (fn) = 1; 2615 TREE_PUBLIC (fn) = 1; 2616 DECL_ARTIFICIAL (fn) = 1; 2617 TREE_NOTHROW (fn) = 1; 2618 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT; 2619 DECL_VISIBILITY_SPECIFIED (fn) = 1; 2620 2621 block_clear_fn = fn; 2622 } 2623 2624 if (asmspec) 2625 set_user_assembler_name (block_clear_fn, asmspec); 2626} 2627 2628static tree 2629clear_storage_libcall_fn (int for_call) 2630{ 2631 static bool emitted_extern; 2632 2633 if (!block_clear_fn) 2634 init_block_clear_fn (NULL); 2635 2636 if (for_call && !emitted_extern) 2637 { 2638 emitted_extern = true; 2639 make_decl_rtl (block_clear_fn); 2640 assemble_external (block_clear_fn); 2641 } 2642 2643 return block_clear_fn; 2644} 2645 2646/* Expand a setmem pattern; return true if successful. */ 2647 2648bool 2649set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align) 2650{ 2651 /* Try the most limited insn first, because there's no point 2652 including more than one in the machine description unless 2653 the more limited one has some advantage. */ 2654 2655 rtx opalign = GEN_INT (align / BITS_PER_UNIT); 2656 enum machine_mode mode; 2657 2658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; 2659 mode = GET_MODE_WIDER_MODE (mode)) 2660 { 2661 enum insn_code code = setmem_optab[(int) mode]; 2662 insn_operand_predicate_fn pred; 2663 2664 if (code != CODE_FOR_nothing 2665 /* We don't need MODE to be narrower than 2666 BITS_PER_HOST_WIDE_INT here because if SIZE is less than 2667 the mode mask, as it is returned by the macro, it will 2668 definitely be less than the actual mode mask. */ 2669 && ((GET_CODE (size) == CONST_INT 2670 && ((unsigned HOST_WIDE_INT) INTVAL (size) 2671 <= (GET_MODE_MASK (mode) >> 1))) 2672 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) 2673 && ((pred = insn_data[(int) code].operand[0].predicate) == 0 2674 || (*pred) (object, BLKmode)) 2675 && ((pred = insn_data[(int) code].operand[3].predicate) == 0 2676 || (*pred) (opalign, VOIDmode))) 2677 { 2678 rtx opsize, opchar; 2679 enum machine_mode char_mode; 2680 rtx last = get_last_insn (); 2681 rtx pat; 2682 2683 opsize = convert_to_mode (mode, size, 1); 2684 pred = insn_data[(int) code].operand[1].predicate; 2685 if (pred != 0 && ! (*pred) (opsize, mode)) 2686 opsize = copy_to_mode_reg (mode, opsize); 2687 2688 opchar = val; 2689 char_mode = insn_data[(int) code].operand[2].mode; 2690 if (char_mode != VOIDmode) 2691 { 2692 opchar = convert_to_mode (char_mode, opchar, 1); 2693 pred = insn_data[(int) code].operand[2].predicate; 2694 if (pred != 0 && ! (*pred) (opchar, char_mode)) 2695 opchar = copy_to_mode_reg (char_mode, opchar); 2696 } 2697 2698 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign); 2699 if (pat) 2700 { 2701 emit_insn (pat); 2702 return true; 2703 } 2704 else 2705 delete_insns_since (last); 2706 } 2707 } 2708 2709 return false; 2710} 2711 2712 2713/* Write to one of the components of the complex value CPLX. Write VAL to 2714 the real part if IMAG_P is false, and the imaginary part if it's true.
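
   For example (a sketch with illustrative operands): if C is a CONCAT
   of two SFmode pseudos representing an SCmode value, clearing its
   imaginary half is

     write_complex_part (c, CONST0_RTX (SFmode), true);

   which in that case reduces to a single emit_move_insn on
   XEXP (c, 1).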
*/ 2715 2716static void 2717write_complex_part (rtx cplx, rtx val, bool imag_p) 2718{ 2719 enum machine_mode cmode; 2720 enum machine_mode imode; 2721 unsigned ibitsize; 2722 2723 if (GET_CODE (cplx) == CONCAT) 2724 { 2725 emit_move_insn (XEXP (cplx, imag_p), val); 2726 return; 2727 } 2728 2729 cmode = GET_MODE (cplx); 2730 imode = GET_MODE_INNER (cmode); 2731 ibitsize = GET_MODE_BITSIZE (imode); 2732 2733 /* For MEMs simplify_gen_subreg may generate an invalid new address 2734 because, e.g., the original address is considered mode-dependent 2735 by the target, which restricts simplify_subreg from invoking 2736 adjust_address_nv. Instead of preparing fallback support for an 2737 invalid address, we call adjust_address_nv directly. */ 2738 if (MEM_P (cplx)) 2739 { 2740 emit_move_insn (adjust_address_nv (cplx, imode, 2741 imag_p ? GET_MODE_SIZE (imode) : 0), 2742 val); 2743 return; 2744 } 2745 2746 /* If the sub-object is at least word sized, then we know that subregging 2747 will work. This special case is important, since store_bit_field 2748 wants to operate on integer modes, and there's rarely an OImode to 2749 correspond to TCmode. */ 2750 if (ibitsize >= BITS_PER_WORD 2751 /* For hard regs we have exact predicates. Assume we can split 2752 the original object if it spans an even number of hard regs. 2753 This special case is important for SCmode on 64-bit platforms 2754 where the natural size of floating-point regs is 32-bit. */ 2755 || (REG_P (cplx) 2756 && REGNO (cplx) < FIRST_PSEUDO_REGISTER 2757 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)) 2758 { 2759 rtx part = simplify_gen_subreg (imode, cplx, cmode, 2760 imag_p ? GET_MODE_SIZE (imode) : 0); 2761 if (part) 2762 { 2763 emit_move_insn (part, val); 2764 return; 2765 } 2766 else 2767 /* simplify_gen_subreg may fail for sub-word MEMs. */ 2768 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD); 2769 } 2770 2771 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val); 2772} 2773 2774/* Extract one of the components of the complex value CPLX. Extract the 2775 real part if IMAG_P is false, and the imaginary part if it's true. */ 2776 2777static rtx 2778read_complex_part (rtx cplx, bool imag_p) 2779{ 2780 enum machine_mode cmode, imode; 2781 unsigned ibitsize; 2782 2783 if (GET_CODE (cplx) == CONCAT) 2784 return XEXP (cplx, imag_p); 2785 2786 cmode = GET_MODE (cplx); 2787 imode = GET_MODE_INNER (cmode); 2788 ibitsize = GET_MODE_BITSIZE (imode); 2789 2790 /* Special case reads from complex constants that got spilled to memory. */ 2791 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF) 2792 { 2793 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0)); 2794 if (decl && TREE_CODE (decl) == COMPLEX_CST) 2795 { 2796 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl); 2797 if (CONSTANT_CLASS_P (part)) 2798 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL); 2799 } 2800 } 2801 2802 /* For MEMs simplify_gen_subreg may generate an invalid new address 2803 because, e.g., the original address is considered mode-dependent 2804 by the target, which restricts simplify_subreg from invoking 2805 adjust_address_nv. Instead of preparing fallback support for an 2806 invalid address, we call adjust_address_nv directly. */ 2807 if (MEM_P (cplx)) 2808 return adjust_address_nv (cplx, imode, 2809 imag_p ? GET_MODE_SIZE (imode) : 0); 2810 2811 /* If the sub-object is at least word sized, then we know that subregging 2812 will work. 
This special case is important, since extract_bit_field 2813 wants to operate on integer modes, and there's rarely an OImode to 2814 correspond to TCmode. */ 2815 if (ibitsize >= BITS_PER_WORD 2816 /* For hard regs we have exact predicates. Assume we can split 2817 the original object if it spans an even number of hard regs. 2818 This special case is important for SCmode on 64-bit platforms 2819 where the natural size of floating-point regs is 32-bit. */ 2820 || (REG_P (cplx) 2821 && REGNO (cplx) < FIRST_PSEUDO_REGISTER 2822 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)) 2823 { 2824 rtx ret = simplify_gen_subreg (imode, cplx, cmode, 2825 imag_p ? GET_MODE_SIZE (imode) : 0); 2826 if (ret) 2827 return ret; 2828 else 2829 /* simplify_gen_subreg may fail for sub-word MEMs. */ 2830 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD); 2831 } 2832 2833 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 2834 true, NULL_RTX, imode, imode); 2835} 2836 2837/* A subroutine of emit_move_insn_1. Yet another lowpart generator. 2838 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be 2839 represented in NEW_MODE. If FORCE is true, this will never happen, as 2840 we'll force-create a SUBREG if needed. */ 2841 2842static rtx 2843emit_move_change_mode (enum machine_mode new_mode, 2844 enum machine_mode old_mode, rtx x, bool force) 2845{ 2846 rtx ret; 2847 2848 if (MEM_P (x)) 2849 { 2850 /* We don't have to worry about changing the address since the 2851 size in bytes is supposed to be the same. */ 2852 if (reload_in_progress) 2853 { 2854 /* Copy the MEM to change the mode and move any 2855 substitutions from the old MEM to the new one. */ 2856 ret = adjust_address_nv (x, new_mode, 0); 2857 copy_replacements (x, ret); 2858 } 2859 else 2860 ret = adjust_address (x, new_mode, 0); 2861 } 2862 else 2863 { 2864 /* Note that we do want simplify_subreg's behavior of validating 2865 that the new mode is ok for a hard register. If we were to use 2866 simplify_gen_subreg, we would create the subreg, but would 2867 probably run into the target not being able to implement it. */ 2868 /* Except, of course, when FORCE is true, when this is exactly what 2869 we want. Which is needed for CCmodes on some targets. */ 2870 if (force) 2871 ret = simplify_gen_subreg (new_mode, x, old_mode, 0); 2872 else 2873 ret = simplify_subreg (new_mode, x, old_mode, 0); 2874 } 2875 2876 return ret; 2877} 2878 2879/* A subroutine of emit_move_insn_1. Generate a move from Y into X using 2880 an integer mode of the same size as MODE. Returns the instruction 2881 emitted, or NULL if such a move could not be generated. */ 2882 2883static rtx 2884emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force) 2885{ 2886 enum machine_mode imode; 2887 enum insn_code code; 2888 2889 /* There must exist a mode of the exact size we require. */ 2890 imode = int_mode_for_mode (mode); 2891 if (imode == BLKmode) 2892 return NULL_RTX; 2893 2894 /* The target must support moves in this mode. */ 2895 code = mov_optab->handlers[imode].insn_code; 2896 if (code == CODE_FOR_nothing) 2897 return NULL_RTX; 2898 2899 x = emit_move_change_mode (imode, mode, x, force); 2900 if (x == NULL_RTX) 2901 return NULL_RTX; 2902 y = emit_move_change_mode (imode, mode, y, force); 2903 if (y == NULL_RTX) 2904 return NULL_RTX; 2905 return emit_insn (GEN_FCN (code) (x, y)); 2906} 2907 2908/* A subroutine of emit_move_insn_1. X is a push_operand in MODE. 2909 Return an equivalent MEM that does not use an auto-increment. 
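
   For example, on a STACK_GROWS_DOWNWARD target an SImode push
   operand of the form

     (mem:SI (pre_dec (reg sp)))

   is rewritten as an explicit stack-pointer adjustment followed by a
   plain (mem:SI (reg sp)), so the caller can store into it with
   ordinary moves. (The RTL above is schematic, not the exact output
   for any particular target.)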
*/ 2910 2911static rtx 2912emit_move_resolve_push (enum machine_mode mode, rtx x) 2913{ 2914 enum rtx_code code = GET_CODE (XEXP (x, 0)); 2915 HOST_WIDE_INT adjust; 2916 rtx temp; 2917 2918 adjust = GET_MODE_SIZE (mode); 2919#ifdef PUSH_ROUNDING 2920 adjust = PUSH_ROUNDING (adjust); 2921#endif 2922 if (code == PRE_DEC || code == POST_DEC) 2923 adjust = -adjust; 2924 else if (code == PRE_MODIFY || code == POST_MODIFY) 2925 { 2926 rtx expr = XEXP (XEXP (x, 0), 1); 2927 HOST_WIDE_INT val; 2928 2929 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS); 2930 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT); 2931 val = INTVAL (XEXP (expr, 1)); 2932 if (GET_CODE (expr) == MINUS) 2933 val = -val; 2934 gcc_assert (adjust == val || adjust == -val); 2935 adjust = val; 2936 } 2937 2938 /* Do not use anti_adjust_stack, since we don't want to update 2939 stack_pointer_delta. */ 2940 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx, 2941 GEN_INT (adjust), stack_pointer_rtx, 2942 0, OPTAB_LIB_WIDEN); 2943 if (temp != stack_pointer_rtx) 2944 emit_move_insn (stack_pointer_rtx, temp); 2945 2946 switch (code) 2947 { 2948 case PRE_INC: 2949 case PRE_DEC: 2950 case PRE_MODIFY: 2951 temp = stack_pointer_rtx; 2952 break; 2953 case POST_INC: 2954 case POST_DEC: 2955 case POST_MODIFY: 2956 temp = plus_constant (stack_pointer_rtx, -adjust); 2957 break; 2958 default: 2959 gcc_unreachable (); 2960 } 2961 2962 return replace_equiv_address (x, temp); 2963} 2964 2965/* A subroutine of emit_move_complex. Generate a move from Y into X. 2966 X is known to satisfy push_operand, and MODE is known to be complex. 2967 Returns the last instruction emitted. */ 2968 2969static rtx 2970emit_move_complex_push (enum machine_mode mode, rtx x, rtx y) 2971{ 2972 enum machine_mode submode = GET_MODE_INNER (mode); 2973 bool imag_first; 2974 2975#ifdef PUSH_ROUNDING 2976 unsigned int submodesize = GET_MODE_SIZE (submode); 2977 2978 /* In case we output to the stack, but the size is smaller than the 2979 machine can push exactly, we need to use move instructions. */ 2980 if (PUSH_ROUNDING (submodesize) != submodesize) 2981 { 2982 x = emit_move_resolve_push (mode, x); 2983 return emit_move_insn (x, y); 2984 } 2985#endif 2986 2987 /* Note that the real part always precedes the imag part in memory 2988 regardless of machine's endianness. */ 2989 switch (GET_CODE (XEXP (x, 0))) 2990 { 2991 case PRE_DEC: 2992 case POST_DEC: 2993 imag_first = true; 2994 break; 2995 case PRE_INC: 2996 case POST_INC: 2997 imag_first = false; 2998 break; 2999 default: 3000 gcc_unreachable (); 3001 } 3002 3003 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), 3004 read_complex_part (y, imag_first)); 3005 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), 3006 read_complex_part (y, !imag_first)); 3007} 3008 3009/* A subroutine of emit_move_insn_1. Generate a move from Y into X. 3010 MODE is known to be complex. Returns the last instruction emitted. */ 3011 3012static rtx 3013emit_move_complex (enum machine_mode mode, rtx x, rtx y) 3014{ 3015 bool try_int; 3016 3017 /* Need to take special care for pushes, to maintain proper ordering 3018 of the data, and possibly extra padding. */ 3019 if (push_operand (x, mode)) 3020 return emit_move_complex_push (mode, x, y); 3021 3022 /* See if we can coerce the target into moving both values at once. */ 3023 3024 /* Move floating point as parts. 
*/ 3025 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT 3026 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing) 3027 try_int = false; 3028 /* Not possible if the values are inherently not adjacent. */ 3029 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT) 3030 try_int = false; 3031 /* Is possible if both are registers (or subregs of registers). */ 3032 else if (register_operand (x, mode) && register_operand (y, mode)) 3033 try_int = true; 3034 /* If one of the operands is a memory, and alignment constraints 3035 are friendly enough, we may be able to do combined memory operations. 3036 We do not attempt this if Y is a constant because that combination is 3037 usually better with the by-parts thing below. */ 3038 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y)) 3039 && (!STRICT_ALIGNMENT 3040 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT)) 3041 try_int = true; 3042 else 3043 try_int = false; 3044 3045 if (try_int) 3046 { 3047 rtx ret; 3048 3049 /* For memory to memory moves, optimal behavior can be had with the 3050 existing block move logic. */ 3051 if (MEM_P (x) && MEM_P (y)) 3052 { 3053 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)), 3054 BLOCK_OP_NO_LIBCALL); 3055 return get_last_insn (); 3056 } 3057 3058 ret = emit_move_via_integer (mode, x, y, true); 3059 if (ret) 3060 return ret; 3061 } 3062 3063 /* Show the output dies here. This is necessary for SUBREGs 3064 of pseudos since we cannot track their lifetimes correctly; 3065 hard regs shouldn't appear here except as return values. */ 3066 if (!reload_completed && !reload_in_progress 3067 && REG_P (x) && !reg_overlap_mentioned_p (x, y)) 3068 emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); 3069 3070 write_complex_part (x, read_complex_part (y, false), false); 3071 write_complex_part (x, read_complex_part (y, true), true); 3072 return get_last_insn (); 3073} 3074 3075/* A subroutine of emit_move_insn_1. Generate a move from Y into X. 3076 MODE is known to be MODE_CC. Returns the last instruction emitted. */ 3077 3078static rtx 3079emit_move_ccmode (enum machine_mode mode, rtx x, rtx y) 3080{ 3081 rtx ret; 3082 3083 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */ 3084 if (mode != CCmode) 3085 { 3086 enum insn_code code = mov_optab->handlers[CCmode].insn_code; 3087 if (code != CODE_FOR_nothing) 3088 { 3089 x = emit_move_change_mode (CCmode, mode, x, true); 3090 y = emit_move_change_mode (CCmode, mode, y, true); 3091 return emit_insn (GEN_FCN (code) (x, y)); 3092 } 3093 } 3094 3095 /* Otherwise, find the MODE_INT mode of the same width. */ 3096 ret = emit_move_via_integer (mode, x, y, false); 3097 gcc_assert (ret != NULL); 3098 return ret; 3099} 3100 3101/* Return true if word I of OP lies entirely in the 3102 undefined bits of a paradoxical subreg. */ 3103 3104static bool 3105undefined_operand_subword_p (rtx op, int i) 3106{ 3107 enum machine_mode innermode, innermostmode; 3108 int offset; 3109 if (GET_CODE (op) != SUBREG) 3110 return false; 3111 innermode = GET_MODE (op); 3112 innermostmode = GET_MODE (SUBREG_REG (op)); 3113 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op); 3114 /* The SUBREG_BYTE represents offset, as if the value were stored in 3115 memory, except for a paradoxical subreg where we define 3116 SUBREG_BYTE to be 0; undo this exception as in 3117 simplify_subreg. 
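
   As a worked illustration (modes chosen only for the example): on a
   32-bit target with WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN set,
   (subreg:DI (reg:SI r) 0) is paradoxical; the SImode value supplies
   only the low-order word of the DImode view, so this function
   returns true for word 0, which lies entirely in the undefined high
   part, and false for word 1.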
*/ 3118 if (SUBREG_BYTE (op) == 0 3119 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode)) 3120 { 3121 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode)); 3122 if (WORDS_BIG_ENDIAN) 3123 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; 3124 if (BYTES_BIG_ENDIAN) 3125 offset += difference % UNITS_PER_WORD; 3126 } 3127 if (offset >= GET_MODE_SIZE (innermostmode) 3128 || offset <= -GET_MODE_SIZE (word_mode)) 3129 return true; 3130 return false; 3131} 3132 3133/* A subroutine of emit_move_insn_1. Generate a move from Y into X. 3134 MODE is any multi-word or full-word mode that lacks a move_insn 3135 pattern. Note that you will get better code if you define such 3136 patterns, even if they must turn into multiple assembler instructions. */ 3137 3138static rtx 3139emit_move_multi_word (enum machine_mode mode, rtx x, rtx y) 3140{ 3141 rtx last_insn = 0; 3142 rtx seq, inner; 3143 bool need_clobber; 3144 int i; 3145 3146 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD); 3147 3148 /* If X is a push on the stack, do the push now and replace 3149 X with a reference to the stack pointer. */ 3150 if (push_operand (x, mode)) 3151 x = emit_move_resolve_push (mode, x); 3152 3153 /* If we are in reload, see if either operand is a MEM whose address 3154 is scheduled for replacement. */ 3155 if (reload_in_progress && MEM_P (x) 3156 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) 3157 x = replace_equiv_address_nv (x, inner); 3158 if (reload_in_progress && MEM_P (y) 3159 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) 3160 y = replace_equiv_address_nv (y, inner); 3161 3162 start_sequence (); 3163 3164 need_clobber = false; 3165 for (i = 0; 3166 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; 3167 i++) 3168 { 3169 rtx xpart = operand_subword (x, i, 1, mode); 3170 rtx ypart; 3171 3172 /* Do not generate code for a move if it would come entirely 3173 from the undefined bits of a paradoxical subreg. */ 3174 if (undefined_operand_subword_p (y, i)) 3175 continue; 3176 3177 ypart = operand_subword (y, i, 1, mode); 3178 3179 /* If we can't get a part of Y, put Y into memory if it is a 3180 constant. Otherwise, force it into a register. Then we must 3181 be able to get a part of Y. */ 3182 if (ypart == 0 && CONSTANT_P (y)) 3183 { 3184 y = use_anchored_address (force_const_mem (mode, y)); 3185 ypart = operand_subword (y, i, 1, mode); 3186 } 3187 else if (ypart == 0) 3188 ypart = operand_subword_force (y, i, mode); 3189 3190 gcc_assert (xpart && ypart); 3191 3192 need_clobber |= (GET_CODE (xpart) == SUBREG); 3193 3194 last_insn = emit_move_insn (xpart, ypart); 3195 } 3196 3197 seq = get_insns (); 3198 end_sequence (); 3199 3200 /* Show the output dies here. This is necessary for SUBREGs 3201 of pseudos since we cannot track their lifetimes correctly; 3202 hard regs shouldn't appear here except as return values. 3203 We never want to emit such a clobber after reload. */ 3204 if (x != y 3205 && ! (reload_in_progress || reload_completed) 3206 && need_clobber != 0) 3207 emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); 3208 3209 emit_insn (seq); 3210 3211 return last_insn; 3212} 3213 3214/* Low level part of emit_move_insn. 3215 Called just like emit_move_insn, but assumes X and Y 3216 are basically valid. 
*/ 3217 3218rtx 3219emit_move_insn_1 (rtx x, rtx y) 3220{ 3221 enum machine_mode mode = GET_MODE (x); 3222 enum insn_code code; 3223 3224 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE); 3225 3226 code = mov_optab->handlers[mode].insn_code; 3227 if (code != CODE_FOR_nothing) 3228 return emit_insn (GEN_FCN (code) (x, y)); 3229 3230 /* Expand complex moves by moving real part and imag part. */ 3231 if (COMPLEX_MODE_P (mode)) 3232 return emit_move_complex (mode, x, y); 3233 3234 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT) 3235 { 3236 rtx result = emit_move_via_integer (mode, x, y, true); 3237 3238 /* If we can't find an integer mode, use multi words. */ 3239 if (result) 3240 return result; 3241 else 3242 return emit_move_multi_word (mode, x, y); 3243 } 3244 3245 if (GET_MODE_CLASS (mode) == MODE_CC) 3246 return emit_move_ccmode (mode, x, y); 3247 3248 /* Try using a move pattern for the corresponding integer mode. This is 3249 only safe when simplify_subreg can convert MODE constants into integer 3250 constants. At present, it can only do this reliably if the value 3251 fits within a HOST_WIDE_INT. */ 3252 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) 3253 { 3254 rtx ret = emit_move_via_integer (mode, x, y, false); 3255 if (ret) 3256 return ret; 3257 } 3258 3259 return emit_move_multi_word (mode, x, y); 3260} 3261 3262/* Generate code to copy Y into X. 3263 Both Y and X must have the same mode, except that 3264 Y can be a constant with VOIDmode. 3265 This mode cannot be BLKmode; use emit_block_move for that. 3266 3267 Return the last instruction emitted. */ 3268 3269rtx 3270emit_move_insn (rtx x, rtx y) 3271{ 3272 enum machine_mode mode = GET_MODE (x); 3273 rtx y_cst = NULL_RTX; 3274 rtx last_insn, set; 3275 3276 gcc_assert (mode != BLKmode 3277 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode)); 3278 3279 if (CONSTANT_P (y)) 3280 { 3281 if (optimize 3282 && SCALAR_FLOAT_MODE_P (GET_MODE (x)) 3283 && (last_insn = compress_float_constant (x, y))) 3284 return last_insn; 3285 3286 y_cst = y; 3287 3288 if (!LEGITIMATE_CONSTANT_P (y)) 3289 { 3290 y = force_const_mem (mode, y); 3291 3292 /* If the target's cannot_force_const_mem prevented the spill, 3293 assume that the target's move expanders will also take care 3294 of the non-legitimate constant. */ 3295 if (!y) 3296 y = y_cst; 3297 else 3298 y = use_anchored_address (y); 3299 } 3300 } 3301 3302 /* If X or Y are memory references, verify that their addresses are valid 3303 for the machine. */ 3304 if (MEM_P (x) 3305 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0)) 3306 && ! push_operand (x, GET_MODE (x))) 3307 || (flag_force_addr 3308 && CONSTANT_ADDRESS_P (XEXP (x, 0))))) 3309 x = validize_mem (x); 3310 3311 if (MEM_P (y) 3312 && (! memory_address_p (GET_MODE (y), XEXP (y, 0)) 3313 || (flag_force_addr 3314 && CONSTANT_ADDRESS_P (XEXP (y, 0))))) 3315 y = validize_mem (y); 3316 3317 gcc_assert (mode != BLKmode); 3318 3319 last_insn = emit_move_insn_1 (x, y); 3320 3321 if (y_cst && REG_P (x) 3322 && (set = single_set (last_insn)) != NULL_RTX 3323 && SET_DEST (set) == x 3324 && ! rtx_equal_p (y_cst, SET_SRC (set))) 3325 set_unique_reg_note (last_insn, REG_EQUAL, y_cst); 3326 3327 return last_insn; 3328} 3329 3330/* If Y is representable exactly in a narrower mode, and the target can 3331 perform the extension directly from constant or memory, then emit the 3332 move as an extension. 
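
   For example (illustrative, and only taken when the target allows
   it): a move of the DFmode constant 1.0 can be emitted as

     (set (reg:DF d) (float_extend:DF (mem:SF <constant pool ref>)))

   since 1.0 truncates to SFmode exactly; the narrower constant pool
   entry is smaller, and the extension is often as cheap as a plain
   load.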
*/ 3333 3334static rtx 3335compress_float_constant (rtx x, rtx y) 3336{ 3337 enum machine_mode dstmode = GET_MODE (x); 3338 enum machine_mode orig_srcmode = GET_MODE (y); 3339 enum machine_mode srcmode; 3340 REAL_VALUE_TYPE r; 3341 int oldcost, newcost; 3342 3343 REAL_VALUE_FROM_CONST_DOUBLE (r, y); 3344 3345 if (LEGITIMATE_CONSTANT_P (y)) 3346 oldcost = rtx_cost (y, SET); 3347 else 3348 oldcost = rtx_cost (force_const_mem (dstmode, y), SET); 3349 3350 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode)); 3351 srcmode != orig_srcmode; 3352 srcmode = GET_MODE_WIDER_MODE (srcmode)) 3353 { 3354 enum insn_code ic; 3355 rtx trunc_y, last_insn; 3356 3357 /* Skip if the target can't extend this way. */ 3358 ic = can_extend_p (dstmode, srcmode, 0); 3359 if (ic == CODE_FOR_nothing) 3360 continue; 3361 3362 /* Skip if the narrowed value isn't exact. */ 3363 if (! exact_real_truncate (srcmode, &r)) 3364 continue; 3365 3366 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode); 3367 3368 if (LEGITIMATE_CONSTANT_P (trunc_y)) 3369 { 3370 /* Skip if the target needs extra instructions to perform 3371 the extension. */ 3372 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode)) 3373 continue; 3374 /* This is valid, but may not be cheaper than the original. */ 3375 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET); 3376 if (oldcost < newcost) 3377 continue; 3378 } 3379 else if (float_extend_from_mem[dstmode][srcmode]) 3380 { 3381 trunc_y = force_const_mem (srcmode, trunc_y); 3382 /* This is valid, but may not be cheaper than the original. */ 3383 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET); 3384 if (oldcost < newcost) 3385 continue; 3386 trunc_y = validize_mem (trunc_y); 3387 } 3388 else 3389 continue; 3390 3391 /* For CSE's benefit, force the compressed constant pool entry 3392 into a new pseudo. This constant may be used in different modes, 3393 and if not, combine will put things back together for us. */ 3394 trunc_y = force_reg (srcmode, trunc_y); 3395 emit_unop_insn (ic, x, trunc_y, UNKNOWN); 3396 last_insn = get_last_insn (); 3397 3398 if (REG_P (x)) 3399 set_unique_reg_note (last_insn, REG_EQUAL, y); 3400 3401 return last_insn; 3402 } 3403 3404 return NULL_RTX; 3405} 3406 3407/* Pushing data onto the stack. */ 3408 3409/* Push a block of length SIZE (perhaps variable) 3410 and return an rtx to address the beginning of the block. 3411 The value may be virtual_outgoing_args_rtx. 3412 3413 EXTRA is the number of bytes of padding to push in addition to SIZE. 3414 BELOW nonzero means this padding comes at low addresses; 3415 otherwise, the padding comes at high addresses. */ 3416 3417rtx 3418push_block (rtx size, int extra, int below) 3419{ 3420 rtx temp; 3421 3422 size = convert_modes (Pmode, ptr_mode, size, 1); 3423 if (CONSTANT_P (size)) 3424 anti_adjust_stack (plus_constant (size, extra)); 3425 else if (REG_P (size) && extra == 0) 3426 anti_adjust_stack (size); 3427 else 3428 { 3429 temp = copy_to_mode_reg (Pmode, size); 3430 if (extra != 0) 3431 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), 3432 temp, 0, OPTAB_LIB_WIDEN); 3433 anti_adjust_stack (temp); 3434 } 3435 3436#ifndef STACK_GROWS_DOWNWARD 3437 if (0) 3438#else 3439 if (1) 3440#endif 3441 { 3442 temp = virtual_outgoing_args_rtx; 3443 if (extra != 0 && below) 3444 temp = plus_constant (temp, extra); 3445 } 3446 else 3447 { 3448 if (GET_CODE (size) == CONST_INT) 3449 temp = plus_constant (virtual_outgoing_args_rtx, 3450 -INTVAL (size) - (below ? 
0 : extra)); 3451 else if (extra != 0 && !below) 3452 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, 3453 negate_rtx (Pmode, plus_constant (size, extra))); 3454 else 3455 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, 3456 negate_rtx (Pmode, size)); 3457 } 3458 3459 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); 3460} 3461 3462#ifdef PUSH_ROUNDING 3463 3464/* Emit a single push insn. */ 3465 3466static void 3467emit_single_push_insn (enum machine_mode mode, rtx x, tree type) 3468{ 3469 rtx dest_addr; 3470 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); 3471 rtx dest; 3472 enum insn_code icode; 3473 insn_operand_predicate_fn pred; 3474 3475 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); 3476 /* If there is a push pattern, use it. Otherwise try the old way of 3477 throwing a MEM representing the push operation to the move expander. */ 3478 icode = push_optab->handlers[(int) mode].insn_code; 3479 if (icode != CODE_FOR_nothing) 3480 { 3481 if (((pred = insn_data[(int) icode].operand[0].predicate) 3482 && !((*pred) (x, mode)))) 3483 x = force_reg (mode, x); 3484 emit_insn (GEN_FCN (icode) (x)); 3485 return; 3486 } 3487 if (GET_MODE_SIZE (mode) == rounded_size) 3488 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); 3489 /* If we are to pad downward, adjust the stack pointer first and 3490 then store X into the stack location using an offset. This is 3491 because emit_move_insn does not know how to pad; it does not have 3492 access to type. */ 3493 else if (FUNCTION_ARG_PADDING (mode, type) == downward) 3494 { 3495 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode); 3496 HOST_WIDE_INT offset; 3497 3498 emit_move_insn (stack_pointer_rtx, 3499 expand_binop (Pmode, 3500#ifdef STACK_GROWS_DOWNWARD 3501 sub_optab, 3502#else 3503 add_optab, 3504#endif 3505 stack_pointer_rtx, 3506 GEN_INT (rounded_size), 3507 NULL_RTX, 0, OPTAB_LIB_WIDEN)); 3508 3509 offset = (HOST_WIDE_INT) padding_size; 3510#ifdef STACK_GROWS_DOWNWARD 3511 if (STACK_PUSH_CODE == POST_DEC) 3512 /* We have already decremented the stack pointer, so get the 3513 previous value. */ 3514 offset += (HOST_WIDE_INT) rounded_size; 3515#else 3516 if (STACK_PUSH_CODE == POST_INC) 3517 /* We have already incremented the stack pointer, so get the 3518 previous value. */ 3519 offset -= (HOST_WIDE_INT) rounded_size; 3520#endif 3521 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset)); 3522 } 3523 else 3524 { 3525#ifdef STACK_GROWS_DOWNWARD 3526 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */ 3527 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, 3528 GEN_INT (-(HOST_WIDE_INT) rounded_size)); 3529#else 3530 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */ 3531 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, 3532 GEN_INT (rounded_size)); 3533#endif 3534 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); 3535 } 3536 3537 dest = gen_rtx_MEM (mode, dest_addr); 3538 3539 if (type != 0) 3540 { 3541 set_mem_attributes (dest, type, 1); 3542 3543 if (flag_optimize_sibling_calls) 3544 /* Function incoming arguments may overlap with sibling call 3545 outgoing arguments and we cannot allow reordering of reads 3546 from function arguments with stores to outgoing arguments 3547 of sibling calls. */ 3548 set_mem_alias_set (dest, 0); 3549 } 3550 emit_move_insn (dest, x); 3551} 3552#endif 3553 3554/* Generate code to push X onto the stack, assuming it has mode MODE and 3555 type TYPE.
3556 MODE is redundant except when X is a CONST_INT (since they don't 3557 carry mode info). 3558 SIZE is an rtx for the size of data to be copied (in bytes), 3559 needed only if X is BLKmode. 3560 3561 ALIGN (in bits) is maximum alignment we can assume. 3562 3563 If PARTIAL and REG are both nonzero, then copy that many of the first 3564 bytes of X into registers starting with REG, and push the rest of X. 3565 The amount of space pushed is decreased by PARTIAL bytes. 3566 REG must be a hard register in this case. 3567 If REG is zero but PARTIAL is not, take all other actions for an 3568 argument partially in registers, but do not actually load any 3569 registers. 3570 3571 EXTRA is the amount in bytes of extra space to leave next to this arg. 3572 This is ignored if an argument block has already been allocated. 3573 3574 On a machine that lacks real push insns, ARGS_ADDR is the address of 3575 the bottom of the argument block for this call. We use indexing off there 3576 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an 3577 argument block has not been preallocated. 3578 3579 ARGS_SO_FAR is the size of args previously pushed for this call. 3580 3581 REG_PARM_STACK_SPACE is nonzero if functions require stack space 3582 for arguments passed in registers. If nonzero, it will be the number 3583 of bytes required. */ 3584 3585void 3586emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size, 3587 unsigned int align, int partial, rtx reg, int extra, 3588 rtx args_addr, rtx args_so_far, int reg_parm_stack_space, 3589 rtx alignment_pad) 3590{ 3591 rtx xinner; 3592 enum direction stack_direction 3593#ifdef STACK_GROWS_DOWNWARD 3594 = downward; 3595#else 3596 = upward; 3597#endif 3598 3599 /* Decide where to pad the argument: `downward' for below, 3600 `upward' for above, or `none' for don't pad it. 3601 Default is below for small data on big-endian machines; else above. */ 3602 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type); 3603 3604 /* Invert direction if stack is post-decrement. 3605 FIXME: why? */ 3606 if (STACK_PUSH_CODE == POST_DEC) 3607 if (where_pad != none) 3608 where_pad = (where_pad == downward ? upward : downward); 3609 3610 xinner = x; 3611 3612 if (mode == BLKmode) 3613 { 3614 /* Copy a block into the stack, entirely or partially. */ 3615 3616 rtx temp; 3617 int used; 3618 int offset; 3619 int skip; 3620 3621 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT); 3622 used = partial - offset; 3623 3624 gcc_assert (size); 3625 3626 /* USED is now the # of bytes we need not copy to the stack 3627 because registers will take care of them. */ 3628 3629 if (partial != 0) 3630 xinner = adjust_address (xinner, BLKmode, used); 3631 3632 /* If the partial register-part of the arg counts in its stack size, 3633 skip the part of stack space corresponding to the registers. 3634 Otherwise, start copying to the beginning of the stack space, 3635 by setting SKIP to 0. */ 3636 skip = (reg_parm_stack_space == 0) ? 0 : used; 3637 3638#ifdef PUSH_ROUNDING 3639 /* Do it with several push insns if that doesn't take lots of insns 3640 and if there is no difficulty with push insns that skip bytes 3641 on the stack for alignment purposes.
*/ 3642 if (args_addr == 0 3643 && PUSH_ARGS 3644 && GET_CODE (size) == CONST_INT 3645 && skip == 0 3646 && MEM_ALIGN (xinner) >= align 3647 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)) 3648 /* Here we avoid the case of a structure whose weak alignment 3649 forces many pushes of a small amount of data, 3650 and such small pushes do rounding that causes trouble. */ 3651 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align)) 3652 || align >= BIGGEST_ALIGNMENT 3653 || (PUSH_ROUNDING (align / BITS_PER_UNIT) 3654 == (align / BITS_PER_UNIT))) 3655 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) 3656 { 3657 /* Push padding now if padding above and stack grows down, 3658 or if padding below and stack grows up. 3659 But if space already allocated, this has already been done. */ 3660 if (extra && args_addr == 0 3661 && where_pad != none && where_pad != stack_direction) 3662 anti_adjust_stack (GEN_INT (extra)); 3663 3664 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0); 3665 } 3666 else 3667#endif /* PUSH_ROUNDING */ 3668 { 3669 rtx target; 3670 3671 /* Otherwise make space on the stack and copy the data 3672 to the address of that space. */ 3673 3674 /* Deduct words put into registers from the size we must copy. */ 3675 if (partial != 0) 3676 { 3677 if (GET_CODE (size) == CONST_INT) 3678 size = GEN_INT (INTVAL (size) - used); 3679 else 3680 size = expand_binop (GET_MODE (size), sub_optab, size, 3681 GEN_INT (used), NULL_RTX, 0, 3682 OPTAB_LIB_WIDEN); 3683 } 3684 3685 /* Get the address of the stack space. 3686 In this case, we do not deal with EXTRA separately. 3687 A single stack adjust will do. */ 3688 if (! args_addr) 3689 { 3690 temp = push_block (size, extra, where_pad == downward); 3691 extra = 0; 3692 } 3693 else if (GET_CODE (args_so_far) == CONST_INT) 3694 temp = memory_address (BLKmode, 3695 plus_constant (args_addr, 3696 skip + INTVAL (args_so_far))); 3697 else 3698 temp = memory_address (BLKmode, 3699 plus_constant (gen_rtx_PLUS (Pmode, 3700 args_addr, 3701 args_so_far), 3702 skip)); 3703 3704 if (!ACCUMULATE_OUTGOING_ARGS) 3705 { 3706 /* If the source is referenced relative to the stack pointer, 3707 copy it to another register to stabilize it. We do not need 3708 to do this if we know that we won't be changing sp. */ 3709 3710 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) 3711 || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) 3712 temp = copy_to_reg (temp); 3713 } 3714 3715 target = gen_rtx_MEM (BLKmode, temp); 3716 3717 /* We do *not* set_mem_attributes here, because incoming arguments 3718 may overlap with sibling call outgoing arguments and we cannot 3719 allow reordering of reads from function arguments with stores 3720 to outgoing arguments of sibling calls. We do, however, want 3721 to record the alignment of the stack slot. */ 3722 /* ALIGN may well be better aligned than TYPE, e.g. due to 3723 PARM_BOUNDARY. Assume the caller isn't lying. */ 3724 set_mem_align (target, align); 3725 3726 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM); 3727 } 3728 } 3729 else if (partial > 0) 3730 { 3731 /* Scalar partly in registers. */ 3732 3733 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD; 3734 int i; 3735 int not_stack; 3736 /* # bytes of start of argument 3737 that we must make space for but need not store. 
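For example, assuming PARM_BOUNDARY is 32 and PARTIAL is 6, OFFSET below is 6 % 4 == 2: space must be made for those two bytes, but they need not be stored, since registers already carry their contents.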
*/ 3738 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT); 3739 int args_offset = INTVAL (args_so_far); 3740 int skip; 3741 3742 /* Push padding now if padding above and stack grows down, 3743 or if padding below and stack grows up. 3744 But if space already allocated, this has already been done. */ 3745 if (extra && args_addr == 0 3746 && where_pad != none && where_pad != stack_direction) 3747 anti_adjust_stack (GEN_INT (extra)); 3748 3749 /* If we make space by pushing it, we might as well push 3750 the real data. Otherwise, we can leave OFFSET nonzero 3751 and leave the space uninitialized. */ 3752 if (args_addr == 0) 3753 offset = 0; 3754 3755 /* Now NOT_STACK gets the number of words that we don't need to 3756 allocate on the stack. Convert OFFSET to words too. */ 3757 not_stack = (partial - offset) / UNITS_PER_WORD; 3758 offset /= UNITS_PER_WORD; 3759 3760 /* If the partial register-part of the arg counts in its stack size, 3761 skip the part of stack space corresponding to the registers. 3762 Otherwise, start copying to the beginning of the stack space, 3763 by setting SKIP to 0. */ 3764 skip = (reg_parm_stack_space == 0) ? 0 : not_stack; 3765 3766 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) 3767 x = validize_mem (force_const_mem (mode, x)); 3768 3769 /* If X is a hard register in a non-integer mode, copy it into a pseudo; 3770 SUBREGs of such registers are not allowed. */ 3771 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER 3772 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)) 3773 x = copy_to_reg (x); 3774 3775 /* Loop over all the words allocated on the stack for this arg. */ 3776 /* We can do it by words, because any scalar bigger than a word 3777 has a size a multiple of a word. */ 3778#ifndef PUSH_ARGS_REVERSED 3779 for (i = not_stack; i < size; i++) 3780#else 3781 for (i = size - 1; i >= not_stack; i--) 3782#endif 3783 if (i >= not_stack + offset) 3784 emit_push_insn (operand_subword_force (x, i, mode), 3785 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX, 3786 0, args_addr, 3787 GEN_INT (args_offset + ((i - not_stack + skip) 3788 * UNITS_PER_WORD)), 3789 reg_parm_stack_space, alignment_pad); 3790 } 3791 else 3792 { 3793 rtx addr; 3794 rtx dest; 3795 3796 /* Push padding now if padding above and stack grows down, 3797 or if padding below and stack grows up. 3798 But if space already allocated, this has already been done. */ 3799 if (extra && args_addr == 0 3800 && where_pad != none && where_pad != stack_direction) 3801 anti_adjust_stack (GEN_INT (extra)); 3802 3803#ifdef PUSH_ROUNDING 3804 if (args_addr == 0 && PUSH_ARGS) 3805 emit_single_push_insn (mode, x, type); 3806 else 3807#endif 3808 { 3809 if (GET_CODE (args_so_far) == CONST_INT) 3810 addr 3811 = memory_address (mode, 3812 plus_constant (args_addr, 3813 INTVAL (args_so_far))); 3814 else 3815 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr, 3816 args_so_far)); 3817 dest = gen_rtx_MEM (mode, addr); 3818 3819 /* We do *not* set_mem_attributes here, because incoming arguments 3820 may overlap with sibling call outgoing arguments and we cannot 3821 allow reordering of reads from function arguments with stores 3822 to outgoing arguments of sibling calls. We do, however, want 3823 to record the alignment of the stack slot. */ 3824 /* ALIGN may well be better aligned than TYPE, e.g. due to 3825 PARM_BOUNDARY. Assume the caller isn't lying. 
*/ 3826 set_mem_align (dest, align); 3827 3828 emit_move_insn (dest, x); 3829 } 3830 } 3831 3832 /* If part should go in registers, copy that part 3833 into the appropriate registers. Do this now, at the end, 3834 since mem-to-mem copies above may do function calls. */ 3835 if (partial > 0 && reg != 0) 3836 { 3837 /* Handle calls that pass values in multiple non-contiguous locations. 3838 The Irix 6 ABI has examples of this. */ 3839 if (GET_CODE (reg) == PARALLEL) 3840 emit_group_load (reg, x, type, -1); 3841 else 3842 { 3843 gcc_assert (partial % UNITS_PER_WORD == 0); 3844 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode); 3845 } 3846 } 3847 3848 if (extra && args_addr == 0 && where_pad == stack_direction) 3849 anti_adjust_stack (GEN_INT (extra)); 3850 3851 if (alignment_pad && args_addr == 0) 3852 anti_adjust_stack (alignment_pad); 3853} 3854 3855/* Return X if X can be used as a subtarget in a sequence of arithmetic 3856 operations. */ 3857 3858static rtx 3859get_subtarget (rtx x) 3860{ 3861 return (optimize 3862 || x == 0 3863 /* Only registers can be subtargets. */ 3864 || !REG_P (x) 3865 /* Don't use hard regs to avoid extending their life. */ 3866 || REGNO (x) < FIRST_PSEUDO_REGISTER 3867 ? 0 : x); 3868} 3869 3870/* A subroutine of expand_assignment. Optimize FIELD op= VAL, where 3871 FIELD is a bitfield. Returns true if the optimization was successful, 3872 and there's nothing else to do. */ 3873 3874static bool 3875optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize, 3876 unsigned HOST_WIDE_INT bitpos, 3877 enum machine_mode mode1, rtx str_rtx, 3878 tree to, tree src) 3879{ 3880 enum machine_mode str_mode = GET_MODE (str_rtx); 3881 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode); 3882 tree op0, op1; 3883 rtx value, result; 3884 optab binop; 3885 3886 if (mode1 != VOIDmode 3887 || bitsize >= BITS_PER_WORD 3888 || str_bitsize > BITS_PER_WORD 3889 || TREE_SIDE_EFFECTS (to) 3890 || TREE_THIS_VOLATILE (to)) 3891 return false; 3892 3893 STRIP_NOPS (src); 3894 if (!BINARY_CLASS_P (src) 3895 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE) 3896 return false; 3897 3898 op0 = TREE_OPERAND (src, 0); 3899 op1 = TREE_OPERAND (src, 1); 3900 STRIP_NOPS (op0); 3901 3902 if (!operand_equal_p (to, op0, 0)) 3903 return false; 3904 3905 if (MEM_P (str_rtx)) 3906 { 3907 unsigned HOST_WIDE_INT offset1; 3908 3909 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD) 3910 str_mode = word_mode; 3911 str_mode = get_best_mode (bitsize, bitpos, 3912 MEM_ALIGN (str_rtx), str_mode, 0); 3913 if (str_mode == VOIDmode) 3914 return false; 3915 str_bitsize = GET_MODE_BITSIZE (str_mode); 3916 3917 offset1 = bitpos; 3918 bitpos %= str_bitsize; 3919 offset1 = (offset1 - bitpos) / BITS_PER_UNIT; 3920 str_rtx = adjust_address (str_rtx, str_mode, offset1); 3921 } 3922 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG) 3923 return false; 3924 3925 /* If the bit field covers the whole REG/MEM, store_field 3926 will likely generate better code. */ 3927 if (bitsize >= str_bitsize) 3928 return false; 3929 3930 /* We can't handle fields split across multiple entities. */ 3931 if (bitpos + bitsize > str_bitsize) 3932 return false; 3933 3934 if (BYTES_BIG_ENDIAN) 3935 bitpos = str_bitsize - bitpos - bitsize; 3936 3937 switch (TREE_CODE (src)) 3938 { 3939 case PLUS_EXPR: 3940 case MINUS_EXPR: 3941 /* For now, just optimize the case of the topmost bitfield 3942 where we don't need to do any masking and also 3943 1 bit bitfields where xor can be used. 
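For instance, with a hypothetical struct S { unsigned f : 1; } s, an update such as s.f += 1 becomes a single xor of the containing word, and a store to the topmost bitfield needs no masking since the shifted value cannot carry into higher bits.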
3944 We might win by one instruction for the other bitfields 3945 too if insv/extv instructions aren't used, so that 3946 can be added later. */ 3947 if (bitpos + bitsize != str_bitsize 3948 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST)) 3949 break; 3950 3951 value = expand_expr (op1, NULL_RTX, str_mode, 0); 3952 value = convert_modes (str_mode, 3953 TYPE_MODE (TREE_TYPE (op1)), value, 3954 TYPE_UNSIGNED (TREE_TYPE (op1))); 3955 3956 /* We may be accessing data outside the field, which means 3957 we can alias adjacent data. */ 3958 if (MEM_P (str_rtx)) 3959 { 3960 str_rtx = shallow_copy_rtx (str_rtx); 3961 set_mem_alias_set (str_rtx, 0); 3962 set_mem_expr (str_rtx, 0); 3963 } 3964 3965 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab; 3966 if (bitsize == 1 && bitpos + bitsize != str_bitsize) 3967 { 3968 value = expand_and (str_mode, value, const1_rtx, NULL); 3969 binop = xor_optab; 3970 } 3971 value = expand_shift (LSHIFT_EXPR, str_mode, value, 3972 build_int_cst (NULL_TREE, bitpos), 3973 NULL_RTX, 1); 3974 result = expand_binop (str_mode, binop, str_rtx, 3975 value, str_rtx, 1, OPTAB_WIDEN); 3976 if (result != str_rtx) 3977 emit_move_insn (str_rtx, result); 3978 return true; 3979 3980 case BIT_IOR_EXPR: 3981 case BIT_XOR_EXPR: 3982 if (TREE_CODE (op1) != INTEGER_CST) 3983 break; 3984 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0); 3985 value = convert_modes (GET_MODE (str_rtx), 3986 TYPE_MODE (TREE_TYPE (op1)), value, 3987 TYPE_UNSIGNED (TREE_TYPE (op1))); 3988 3989 /* We may be accessing data outside the field, which means 3990 we can alias adjacent data. */ 3991 if (MEM_P (str_rtx)) 3992 { 3993 str_rtx = shallow_copy_rtx (str_rtx); 3994 set_mem_alias_set (str_rtx, 0); 3995 set_mem_expr (str_rtx, 0); 3996 } 3997 3998 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab; 3999 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))) 4000 { 4001 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) 4002 - 1); 4003 value = expand_and (GET_MODE (str_rtx), value, mask, 4004 NULL_RTX); 4005 } 4006 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value, 4007 build_int_cst (NULL_TREE, bitpos), 4008 NULL_RTX, 1); 4009 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx, 4010 value, str_rtx, 1, OPTAB_WIDEN); 4011 if (result != str_rtx) 4012 emit_move_insn (str_rtx, result); 4013 return true; 4014 4015 default: 4016 break; 4017 } 4018 4019 return false; 4020} 4021 4022 4023/* Expand an assignment that stores the value of FROM into TO. */ 4024 4025void 4026expand_assignment (tree to, tree from) 4027{ 4028 rtx to_rtx = 0; 4029 rtx result; 4030 4031 /* Don't crash if the lhs of the assignment was erroneous. */ 4032 if (TREE_CODE (to) == ERROR_MARK) 4033 { 4034 result = expand_normal (from); 4035 return; 4036 } 4037 4038 /* Optimize away no-op moves without side-effects. */ 4039 if (operand_equal_p (to, from, 0)) 4040 return; 4041 4042 /* Assignment of a structure component needs special treatment 4043 if the structure component's rtx is not simply a MEM. 4044 Assignment of an array element at a constant index, and assignment of 4045 an array element in an unaligned packed structure field, has the same 4046 problem. 
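For instance, stores such as x.a.b = v, a[3] = v, or an assignment to an array element of an unaligned packed field all take the path below.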
*/ 4047 if (handled_component_p (to) 4048 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE) 4049 { 4050 enum machine_mode mode1; 4051 HOST_WIDE_INT bitsize, bitpos; 4052 tree offset; 4053 int unsignedp; 4054 int volatilep = 0; 4055 tree tem; 4056 4057 push_temp_slots (); 4058 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, 4059 &unsignedp, &volatilep, true); 4060 4061 /* If we are going to use store_bit_field and extract_bit_field, 4062 make sure to_rtx will be safe for multiple use. */ 4063 4064 to_rtx = expand_normal (tem); 4065 4066 if (offset != 0) 4067 { 4068 rtx offset_rtx; 4069 4070 if (!MEM_P (to_rtx)) 4071 { 4072 /* We can get constant negative offsets into arrays with broken 4073 user code. Translate this to a trap instead of ICEing. */ 4074 gcc_assert (TREE_CODE (offset) == INTEGER_CST); 4075 expand_builtin_trap (); 4076 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx); 4077 } 4078 4079 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); 4080#ifdef POINTERS_EXTEND_UNSIGNED 4081 if (GET_MODE (offset_rtx) != Pmode) 4082 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); 4083#else 4084 if (GET_MODE (offset_rtx) != ptr_mode) 4085 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); 4086#endif 4087 4088 /* A constant address in TO_RTX can have VOIDmode, we must not try 4089 to call force_reg for that case. Avoid that case. */ 4090 if (MEM_P (to_rtx) 4091 && GET_MODE (to_rtx) == BLKmode 4092 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode 4093 && bitsize > 0 4094 && (bitpos % bitsize) == 0 4095 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 4096 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1)) 4097 { 4098 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT); 4099 bitpos = 0; 4100 } 4101 4102 to_rtx = offset_address (to_rtx, offset_rtx, 4103 highest_pow2_factor_for_target (to, 4104 offset)); 4105 } 4106 4107 /* Handle expand_expr of a complex value returning a CONCAT. */ 4108 if (GET_CODE (to_rtx) == CONCAT) 4109 { 4110 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE) 4111 { 4112 gcc_assert (bitpos == 0); 4113 result = store_expr (from, to_rtx, false); 4114 } 4115 else 4116 { 4117 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1)); 4118 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false); 4119 } 4120 } 4121 else 4122 { 4123 if (MEM_P (to_rtx)) 4124 { 4125 /* If the field is at offset zero, we could have been given the 4126 DECL_RTX of the parent struct. Don't munge it. */ 4127 to_rtx = shallow_copy_rtx (to_rtx); 4128 4129 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos); 4130 4131 /* Deal with volatile and readonly fields. The former is only 4132 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */ 4133 if (volatilep) 4134 MEM_VOLATILE_P (to_rtx) = 1; 4135 if (component_uses_parent_alias_set (to)) 4136 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; 4137 } 4138 4139 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1, 4140 to_rtx, to, from)) 4141 result = NULL; 4142 else 4143 result = store_field (to_rtx, bitsize, bitpos, mode1, from, 4144 TREE_TYPE (tem), get_alias_set (to)); 4145 } 4146 4147 if (result) 4148 preserve_temp_slots (result); 4149 free_temp_slots (); 4150 pop_temp_slots (); 4151 return; 4152 } 4153 4154 /* If the rhs is a function call and its value is not an aggregate, 4155 call the function before we start to compute the lhs. 4156 This is needed for correct code for cases such as 4157 val = setjmp (buf) on machines where reference to val 4158 requires loading up part of an address in a separate insn. 
4159 4160 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG 4161 since it might be a promoted variable where the zero- or sign- extension 4162 needs to be done. Handling this in the normal way is safe because no 4163 computation is done before the call. */ 4164 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from) 4165 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST 4166 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) 4167 && REG_P (DECL_RTL (to)))) 4168 { 4169 rtx value; 4170 4171 push_temp_slots (); 4172 value = expand_normal (from); 4173 if (to_rtx == 0) 4174 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); 4175 4176 /* Handle calls that return values in multiple non-contiguous locations. 4177 The Irix 6 ABI has examples of this. */ 4178 if (GET_CODE (to_rtx) == PARALLEL) 4179 emit_group_load (to_rtx, value, TREE_TYPE (from), 4180 int_size_in_bytes (TREE_TYPE (from))); 4181 else if (GET_MODE (to_rtx) == BLKmode) 4182 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL); 4183 else 4184 { 4185 if (POINTER_TYPE_P (TREE_TYPE (to))) 4186 value = convert_memory_address (GET_MODE (to_rtx), value); 4187 emit_move_insn (to_rtx, value); 4188 } 4189 preserve_temp_slots (to_rtx); 4190 free_temp_slots (); 4191 pop_temp_slots (); 4192 return; 4193 } 4194 4195 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. 4196 Don't re-expand if it was expanded already (in COMPONENT_REF case). */ 4197 4198 if (to_rtx == 0) 4199 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); 4200 4201 /* Don't move directly into a return register. */ 4202 if (TREE_CODE (to) == RESULT_DECL 4203 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL)) 4204 { 4205 rtx temp; 4206 4207 push_temp_slots (); 4208 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0); 4209 4210 if (GET_CODE (to_rtx) == PARALLEL) 4211 emit_group_load (to_rtx, temp, TREE_TYPE (from), 4212 int_size_in_bytes (TREE_TYPE (from))); 4213 else 4214 emit_move_insn (to_rtx, temp); 4215 4216 preserve_temp_slots (to_rtx); 4217 free_temp_slots (); 4218 pop_temp_slots (); 4219 return; 4220 } 4221 4222 /* In case we are returning the contents of an object which overlaps 4223 the place the value is being stored, use a safe function when copying 4224 a value through a pointer into a structure value return block. */ 4225 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF 4226 && current_function_returns_struct 4227 && !current_function_returns_pcc_struct) 4228 { 4229 rtx from_rtx, size; 4230 4231 push_temp_slots (); 4232 size = expr_size (from); 4233 from_rtx = expand_normal (from); 4234 4235 emit_library_call (memmove_libfunc, LCT_NORMAL, 4236 VOIDmode, 3, XEXP (to_rtx, 0), Pmode, 4237 XEXP (from_rtx, 0), Pmode, 4238 convert_to_mode (TYPE_MODE (sizetype), 4239 size, TYPE_UNSIGNED (sizetype)), 4240 TYPE_MODE (sizetype)); 4241 4242 preserve_temp_slots (to_rtx); 4243 free_temp_slots (); 4244 pop_temp_slots (); 4245 return; 4246 } 4247 4248 /* Compute FROM and store the value in the rtx we got. */ 4249 4250 push_temp_slots (); 4251 result = store_expr (from, to_rtx, 0); 4252 preserve_temp_slots (result); 4253 free_temp_slots (); 4254 pop_temp_slots (); 4255 return; 4256} 4257 4258/* Generate code for computing expression EXP, 4259 and storing the value into TARGET. 4260 4261 If the mode is BLKmode then we may return TARGET itself. 4262 It turns out that in BLKmode it doesn't cause a problem. 
4263 because C has no operators that could combine two different 4264 assignments into the same BLKmode object with different values 4265 with no sequence point. Will other languages need this to 4266 be more thorough? 4267 4268 If CALL_PARAM_P is nonzero, this is a store into a call param on the 4269 stack, and block moves may need to be treated specially. */ 4270 4271rtx 4272store_expr (tree exp, rtx target, int call_param_p) 4273{ 4274 rtx temp; 4275 rtx alt_rtl = NULL_RTX; 4276 int dont_return_target = 0; 4277 4278 if (VOID_TYPE_P (TREE_TYPE (exp))) 4279 { 4280 /* C++ can generate ?: expressions with a throw expression in one 4281 branch and an rvalue in the other. Here, we resolve attempts to 4282 store the throw expression's nonexistent result. */ 4283 gcc_assert (!call_param_p); 4284 expand_expr (exp, const0_rtx, VOIDmode, 0); 4285 return NULL_RTX; 4286 } 4287 if (TREE_CODE (exp) == COMPOUND_EXPR) 4288 { 4289 /* Perform first part of compound expression, then assign from second 4290 part. */ 4291 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 4292 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL); 4293 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p); 4294 } 4295 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode) 4296 { 4297 /* For conditional expression, get safe form of the target. Then 4298 test the condition, doing the appropriate assignment on either 4299 side. This avoids the creation of unnecessary temporaries. 4300 For non-BLKmode, it is more efficient not to do this. */ 4301 4302 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx (); 4303 4304 do_pending_stack_adjust (); 4305 NO_DEFER_POP; 4306 jumpifnot (TREE_OPERAND (exp, 0), lab1); 4307 store_expr (TREE_OPERAND (exp, 1), target, call_param_p); 4308 emit_jump_insn (gen_jump (lab2)); 4309 emit_barrier (); 4310 emit_label (lab1); 4311 store_expr (TREE_OPERAND (exp, 2), target, call_param_p); 4312 emit_label (lab2); 4313 OK_DEFER_POP; 4314 4315 return NULL_RTX; 4316 } 4317 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) 4318 /* If this is a scalar in a register that is stored in a wider mode 4319 than the declared mode, compute the result into its declared mode 4320 and then convert to the wider mode. Our value is the computed 4321 expression. */ 4322 { 4323 rtx inner_target = 0; 4324 4325 /* We can do the conversion inside EXP, which will often result 4326 in some optimizations. Do the conversion in two steps: first 4327 change the signedness, if needed, then the extend. But don't 4328 do this if the type of EXP is a subtype of something else 4329 since then the conversion might involve more than just 4330 converting modes. */ 4331 if (INTEGRAL_TYPE_P (TREE_TYPE (exp)) 4332 && TREE_TYPE (TREE_TYPE (exp)) == 0 4333 && (!lang_hooks.reduce_bit_field_operations 4334 || (GET_MODE_PRECISION (GET_MODE (target)) 4335 == TYPE_PRECISION (TREE_TYPE (exp))))) 4336 { 4337 if (TYPE_UNSIGNED (TREE_TYPE (exp)) 4338 != SUBREG_PROMOTED_UNSIGNED_P (target)) 4339 exp = fold_convert 4340 (lang_hooks.types.signed_or_unsigned_type 4341 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp); 4342 4343 exp = fold_convert (lang_hooks.types.type_for_mode 4344 (GET_MODE (SUBREG_REG (target)), 4345 SUBREG_PROMOTED_UNSIGNED_P (target)), 4346 exp); 4347 4348 inner_target = SUBREG_REG (target); 4349 } 4350 4351 temp = expand_expr (exp, inner_target, VOIDmode, 4352 call_param_p ? 
EXPAND_STACK_PARM : EXPAND_NORMAL); 4353 4354 /* If TEMP is a VOIDmode constant, use convert_modes to make 4355 sure that we properly convert it. */ 4356 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) 4357 { 4358 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), 4359 temp, SUBREG_PROMOTED_UNSIGNED_P (target)); 4360 temp = convert_modes (GET_MODE (SUBREG_REG (target)), 4361 GET_MODE (target), temp, 4362 SUBREG_PROMOTED_UNSIGNED_P (target)); 4363 } 4364 4365 convert_move (SUBREG_REG (target), temp, 4366 SUBREG_PROMOTED_UNSIGNED_P (target)); 4367 4368 return NULL_RTX; 4369 } 4370 else 4371 { 4372 temp = expand_expr_real (exp, target, GET_MODE (target), 4373 (call_param_p 4374 ? EXPAND_STACK_PARM : EXPAND_NORMAL), 4375 &alt_rtl); 4376 /* Return TARGET if it's a specified hardware register. 4377 If TARGET is a volatile mem ref, either return TARGET 4378 or return a reg copied *from* TARGET; ANSI requires this. 4379 4380 Otherwise, if TEMP is not TARGET, return TEMP 4381 if it is constant (for efficiency), 4382 or if we really want the correct value. */ 4383 if (!(target && REG_P (target) 4384 && REGNO (target) < FIRST_PSEUDO_REGISTER) 4385 && !(MEM_P (target) && MEM_VOLATILE_P (target)) 4386 && ! rtx_equal_p (temp, target) 4387 && CONSTANT_P (temp)) 4388 dont_return_target = 1; 4389 } 4390 4391 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not 4392 the same as that of TARGET, adjust the constant. This is needed, for 4393 example, in case it is a CONST_DOUBLE and we want only a word-sized 4394 value. */ 4395 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode 4396 && TREE_CODE (exp) != ERROR_MARK 4397 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) 4398 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), 4399 temp, TYPE_UNSIGNED (TREE_TYPE (exp))); 4400 4401 /* If value was not generated in the target, store it there. 4402 Convert the value to TARGET's type first if necessary and emit the 4403 pending incrementations that have been queued when expanding EXP. 4404 Note that we cannot emit the whole queue blindly because this will 4405 effectively disable the POST_INC optimization later. 4406 4407 If TEMP and TARGET compare equal according to rtx_equal_p, but 4408 one or both of them are volatile memory refs, we have to distinguish 4409 two cases: 4410 - expand_expr has used TARGET. In this case, we must not generate 4411 another copy. This can be detected by TARGET being equal according 4412 to == . 4413 - expand_expr has not used TARGET - that means that the source just 4414 happens to have the same RTX form. Since temp will have been created 4415 by expand_expr, it will compare unequal according to == . 4416 We must generate a copy in this case, to reach the correct number 4417 of volatile memory references. */ 4418 4419 if ((! rtx_equal_p (temp, target) 4420 || (temp != target && (side_effects_p (temp) 4421 || side_effects_p (target)))) 4422 && TREE_CODE (exp) != ERROR_MARK 4423 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET, 4424 but TARGET is not valid memory reference, TEMP will differ 4425 from TARGET although it is really the same location. */ 4426 && !(alt_rtl && rtx_equal_p (alt_rtl, target)) 4427 /* If there's nothing to copy, don't bother. Don't call 4428 expr_size unless necessary, because some front-ends (C++) 4429 expr_size-hook must not be given objects that are not 4430 supposed to be bit-copied or bit-initialized. 
*/ 4431 && expr_size (exp) != const0_rtx) 4432 { 4433 if (GET_MODE (temp) != GET_MODE (target) 4434 && GET_MODE (temp) != VOIDmode) 4435 { 4436 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); 4437 if (dont_return_target) 4438 { 4439 /* In this case, we will return TEMP, 4440 so make sure it has the proper mode. 4441 But don't forget to store the value into TARGET. */ 4442 temp = convert_to_mode (GET_MODE (target), temp, unsignedp); 4443 emit_move_insn (target, temp); 4444 } 4445 else if (GET_MODE (target) == BLKmode) 4446 emit_block_move (target, temp, expr_size (exp), 4447 (call_param_p 4448 ? BLOCK_OP_CALL_PARM 4449 : BLOCK_OP_NORMAL)); 4450 else 4451 convert_move (target, temp, unsignedp); 4452 } 4453 4454 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST) 4455 { 4456 /* Handle copying a string constant into an array. The string 4457 constant may be shorter than the array. So copy just the string's 4458 actual length, and clear the rest. First get the size of the data 4459 type of the string, which is actually the size of the target. */ 4460 rtx size = expr_size (exp); 4461 4462 if (GET_CODE (size) == CONST_INT 4463 && INTVAL (size) < TREE_STRING_LENGTH (exp)) 4464 emit_block_move (target, temp, size, 4465 (call_param_p 4466 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); 4467 else 4468 { 4469 /* Compute the size of the data to copy from the string. */ 4470 tree copy_size 4471 = size_binop (MIN_EXPR, 4472 make_tree (sizetype, size), 4473 size_int (TREE_STRING_LENGTH (exp))); 4474 rtx copy_size_rtx 4475 = expand_expr (copy_size, NULL_RTX, VOIDmode, 4476 (call_param_p 4477 ? EXPAND_STACK_PARM : EXPAND_NORMAL)); 4478 rtx label = 0; 4479 4480 /* Copy that much. */ 4481 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 4482 TYPE_UNSIGNED (sizetype)); 4483 emit_block_move (target, temp, copy_size_rtx, 4484 (call_param_p 4485 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); 4486 4487 /* Figure out how much is left in TARGET that we have to clear. 4488 Do all calculations in ptr_mode. */ 4489 if (GET_CODE (copy_size_rtx) == CONST_INT) 4490 { 4491 size = plus_constant (size, -INTVAL (copy_size_rtx)); 4492 target = adjust_address (target, BLKmode, 4493 INTVAL (copy_size_rtx)); 4494 } 4495 else 4496 { 4497 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size, 4498 copy_size_rtx, NULL_RTX, 0, 4499 OPTAB_LIB_WIDEN); 4500 4501#ifdef POINTERS_EXTEND_UNSIGNED 4502 if (GET_MODE (copy_size_rtx) != Pmode) 4503 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx, 4504 TYPE_UNSIGNED (sizetype)); 4505#endif 4506 4507 target = offset_address (target, copy_size_rtx, 4508 highest_pow2_factor (copy_size)); 4509 label = gen_label_rtx (); 4510 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX, 4511 GET_MODE (size), 0, label); 4512 } 4513 4514 if (size != const0_rtx) 4515 clear_storage (target, size, BLOCK_OP_NORMAL); 4516 4517 if (label) 4518 emit_label (label); 4519 } 4520 } 4521 /* Handle calls that return values in multiple non-contiguous locations. 4522 The Irix 6 ABI has examples of this. */ 4523 else if (GET_CODE (target) == PARALLEL) 4524 emit_group_load (target, temp, TREE_TYPE (exp), 4525 int_size_in_bytes (TREE_TYPE (exp))); 4526 else if (GET_MODE (temp) == BLKmode) 4527 emit_block_move (target, temp, expr_size (exp), 4528 (call_param_p 4529 ? 
BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); 4530 else 4531 { 4532 temp = force_operand (temp, target); 4533 if (temp != target) 4534 emit_move_insn (target, temp); 4535 } 4536 } 4537 4538 return NULL_RTX; 4539} 4540 4541/* Helper for categorize_ctor_elements. Identical interface. */ 4542 4543static bool 4544categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts, 4545 HOST_WIDE_INT *p_elt_count, 4546 bool *p_must_clear) 4547{ 4548 unsigned HOST_WIDE_INT idx; 4549 HOST_WIDE_INT nz_elts, elt_count; 4550 tree value, purpose; 4551 4552 /* Whether CTOR is a valid constant initializer, in accordance with what 4553 initializer_constant_valid_p does. If inferred from the constructor 4554 elements, true until proven otherwise. */ 4555 bool const_from_elts_p = constructor_static_from_elts_p (ctor); 4556 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor); 4557 4558 nz_elts = 0; 4559 elt_count = 0; 4560 4561 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value) 4562 { 4563 HOST_WIDE_INT mult; 4564 4565 mult = 1; 4566 if (TREE_CODE (purpose) == RANGE_EXPR) 4567 { 4568 tree lo_index = TREE_OPERAND (purpose, 0); 4569 tree hi_index = TREE_OPERAND (purpose, 1); 4570 4571 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1)) 4572 mult = (tree_low_cst (hi_index, 1) 4573 - tree_low_cst (lo_index, 1) + 1); 4574 } 4575 4576 switch (TREE_CODE (value)) 4577 { 4578 case CONSTRUCTOR: 4579 { 4580 HOST_WIDE_INT nz = 0, ic = 0; 4581 4582 bool const_elt_p 4583 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear); 4584 4585 nz_elts += mult * nz; 4586 elt_count += mult * ic; 4587 4588 if (const_from_elts_p && const_p) 4589 const_p = const_elt_p; 4590 } 4591 break; 4592 4593 case INTEGER_CST: 4594 case REAL_CST: 4595 if (!initializer_zerop (value)) 4596 nz_elts += mult; 4597 elt_count += mult; 4598 break; 4599 4600 case STRING_CST: 4601 nz_elts += mult * TREE_STRING_LENGTH (value); 4602 elt_count += mult * TREE_STRING_LENGTH (value); 4603 break; 4604 4605 case COMPLEX_CST: 4606 if (!initializer_zerop (TREE_REALPART (value))) 4607 nz_elts += mult; 4608 if (!initializer_zerop (TREE_IMAGPART (value))) 4609 nz_elts += mult; 4610 elt_count += mult; 4611 break; 4612 4613 case VECTOR_CST: 4614 { 4615 tree v; 4616 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v)) 4617 { 4618 if (!initializer_zerop (TREE_VALUE (v))) 4619 nz_elts += mult; 4620 elt_count += mult; 4621 } 4622 } 4623 break; 4624 4625 default: 4626 nz_elts += mult; 4627 elt_count += mult; 4628 4629 if (const_from_elts_p && const_p) 4630 const_p = initializer_constant_valid_p (value, TREE_TYPE (value)) 4631 != NULL_TREE; 4632 break; 4633 } 4634 } 4635 4636 if (!*p_must_clear 4637 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE 4638 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE)) 4639 { 4640 tree init_sub_type; 4641 bool clear_this = true; 4642 4643 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor))) 4644 { 4645 /* We don't expect more than one element of the union to be 4646 initialized. Not sure what we should do otherwise... */ 4647 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor)) 4648 == 1); 4649 4650 init_sub_type = TREE_TYPE (VEC_index (constructor_elt, 4651 CONSTRUCTOR_ELTS (ctor), 4652 0)->value); 4653 4654 /* ??? We could look at each element of the union, and find the 4655 largest element. Which would avoid comparing the size of the 4656 initialized element against any tail padding in the union. 4657 Doesn't seem worth the effort... 
*/ 4658 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)), 4659 TYPE_SIZE (init_sub_type)) == 1) 4660 { 4661 /* And now we have to find out if the element itself is fully 4662 constructed. E.g. for union { struct { int a, b; } s; } u 4663 = { .s = { .a = 1 } }. */ 4664 if (elt_count == count_type_elements (init_sub_type, false)) 4665 clear_this = false; 4666 } 4667 } 4668 4669 *p_must_clear = clear_this; 4670 } 4671 4672 *p_nz_elts += nz_elts; 4673 *p_elt_count += elt_count; 4674 4675 return const_p; 4676} 4677 4678/* Examine CTOR to discover: 4679 * how many scalar fields are set to nonzero values, 4680 and place it in *P_NZ_ELTS; 4681 * how many scalar fields in total are in CTOR, 4682 and place it in *P_ELT_COUNT. 4683 * if a type is a union, and the initializer from the constructor 4684 is not the largest element in the union, then set *p_must_clear. 4685 4686 Return whether or not CTOR is a valid static constant initializer, the same 4687 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */ 4688 4689bool 4690categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts, 4691 HOST_WIDE_INT *p_elt_count, 4692 bool *p_must_clear) 4693{ 4694 *p_nz_elts = 0; 4695 *p_elt_count = 0; 4696 *p_must_clear = false; 4697 4698 return 4699 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear); 4700} 4701 4702/* Count the number of scalars in TYPE. Return -1 on overflow or 4703 variable-sized. If ALLOW_FLEXARR is true, don't count flexible 4704 array member at the end of the structure. */ 4705 4706HOST_WIDE_INT 4707count_type_elements (tree type, bool allow_flexarr) 4708{ 4709 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1)); 4710 switch (TREE_CODE (type)) 4711 { 4712 case ARRAY_TYPE: 4713 { 4714 tree telts = array_type_nelts (type); 4715 if (telts && host_integerp (telts, 1)) 4716 { 4717 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1; 4718 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false); 4719 if (n == 0) 4720 return 0; 4721 else if (max / n > m) 4722 return n * m; 4723 } 4724 return -1; 4725 } 4726 4727 case RECORD_TYPE: 4728 { 4729 HOST_WIDE_INT n = 0, t; 4730 tree f; 4731 4732 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f)) 4733 if (TREE_CODE (f) == FIELD_DECL) 4734 { 4735 t = count_type_elements (TREE_TYPE (f), false); 4736 if (t < 0) 4737 { 4738 /* Check for structures with flexible array member. */ 4739 tree tf = TREE_TYPE (f); 4740 if (allow_flexarr 4741 && TREE_CHAIN (f) == NULL 4742 && TREE_CODE (tf) == ARRAY_TYPE 4743 && TYPE_DOMAIN (tf) 4744 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf)) 4745 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf))) 4746 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf)) 4747 && int_size_in_bytes (type) >= 0) 4748 break; 4749 4750 return -1; 4751 } 4752 n += t; 4753 } 4754 4755 return n; 4756 } 4757 4758 case UNION_TYPE: 4759 case QUAL_UNION_TYPE: 4760 return -1; 4761 4762 case COMPLEX_TYPE: 4763 return 2; 4764 4765 case VECTOR_TYPE: 4766 return TYPE_VECTOR_SUBPARTS (type); 4767 4768 case INTEGER_TYPE: 4769 case REAL_TYPE: 4770 case ENUMERAL_TYPE: 4771 case BOOLEAN_TYPE: 4772 case POINTER_TYPE: 4773 case OFFSET_TYPE: 4774 case REFERENCE_TYPE: 4775 return 1; 4776 4777 case VOID_TYPE: 4778 case METHOD_TYPE: 4779 case FUNCTION_TYPE: 4780 case LANG_TYPE: 4781 default: 4782 gcc_unreachable (); 4783 } 4784} 4785 4786/* Return 1 if EXP contains mostly (3/4) zeros. 
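For example, a CONSTRUCTOR for int a[8] = { [5] = 1 } has one nonzero element out of eight, and 1 < 8 / 4, so it qualifies.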
*/ 4787 4788static int 4789mostly_zeros_p (tree exp) 4790{ 4791 if (TREE_CODE (exp) == CONSTRUCTOR) 4792 4793 { 4794 HOST_WIDE_INT nz_elts, count, elts; 4795 bool must_clear; 4796 4797 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear); 4798 if (must_clear) 4799 return 1; 4800 4801 elts = count_type_elements (TREE_TYPE (exp), false); 4802 4803 return nz_elts < elts / 4; 4804 } 4805 4806 return initializer_zerop (exp); 4807} 4808 4809/* Return 1 if EXP contains all zeros. */ 4810 4811static int 4812all_zeros_p (tree exp) 4813{ 4814 if (TREE_CODE (exp) == CONSTRUCTOR) 4815 4816 { 4817 HOST_WIDE_INT nz_elts, count; 4818 bool must_clear; 4819 4820 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear); 4821 return nz_elts == 0; 4822 } 4823 4824 return initializer_zerop (exp); 4825} 4826 4827/* Helper function for store_constructor. 4828 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field. 4829 TYPE is the type of the CONSTRUCTOR, not the element type. 4830 CLEARED is as for store_constructor. 4831 ALIAS_SET is the alias set to use for any stores. 4832 4833 This provides a recursive shortcut back to store_constructor when it isn't 4834 necessary to go through store_field. This is so that we can pass through 4835 the cleared field to let store_constructor know that we may not have to 4836 clear a substructure if the outer structure has already been cleared. */ 4837 4838static void 4839store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize, 4840 HOST_WIDE_INT bitpos, enum machine_mode mode, 4841 tree exp, tree type, int cleared, int alias_set) 4842{ 4843 if (TREE_CODE (exp) == CONSTRUCTOR 4844 /* We can only call store_constructor recursively if the size and 4845 bit position are on a byte boundary. */ 4846 && bitpos % BITS_PER_UNIT == 0 4847 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0) 4848 /* If we have a nonzero bitpos for a register target, then we just 4849 let store_field do the bitfield handling. This is unlikely to 4850 generate unnecessary clear instructions anyways. */ 4851 && (bitpos == 0 || MEM_P (target))) 4852 { 4853 if (MEM_P (target)) 4854 target 4855 = adjust_address (target, 4856 GET_MODE (target) == BLKmode 4857 || 0 != (bitpos 4858 % GET_MODE_ALIGNMENT (GET_MODE (target))) 4859 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); 4860 4861 4862 /* Update the alias set, if required. */ 4863 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target) 4864 && MEM_ALIAS_SET (target) != 0) 4865 { 4866 target = copy_rtx (target); 4867 set_mem_alias_set (target, alias_set); 4868 } 4869 4870 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT); 4871 } 4872 else 4873 store_field (target, bitsize, bitpos, mode, exp, type, alias_set); 4874} 4875 4876/* Store the value of constructor EXP into the rtx TARGET. 4877 TARGET is either a REG or a MEM; we know it cannot conflict, since 4878 safe_from_p has been called. 4879 CLEARED is true if TARGET is known to have been zero'd. 4880 SIZE is the number of bytes of TARGET we are allowed to modify: this 4881 may not be the same as the size of EXP if we are assigning to a field 4882 which has been packed to exclude padding bits. 
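Roughly, each case below first decides whether TARGET must be pre-cleared (or its old value marked dead), and then stores the nonzero elements one at a time via store_constructor_field.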
*/ 4883 4884static void 4885store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) 4886{ 4887 tree type = TREE_TYPE (exp); 4888#ifdef WORD_REGISTER_OPERATIONS 4889 HOST_WIDE_INT exp_size = int_size_in_bytes (type); 4890#endif 4891 4892 switch (TREE_CODE (type)) 4893 { 4894 case RECORD_TYPE: 4895 case UNION_TYPE: 4896 case QUAL_UNION_TYPE: 4897 { 4898 unsigned HOST_WIDE_INT idx; 4899 tree field, value; 4900 4901 /* If size is zero or the target is already cleared, do nothing. */ 4902 if (size == 0 || cleared) 4903 cleared = 1; 4904 /* We either clear the aggregate or indicate the value is dead. */ 4905 else if ((TREE_CODE (type) == UNION_TYPE 4906 || TREE_CODE (type) == QUAL_UNION_TYPE) 4907 && ! CONSTRUCTOR_ELTS (exp)) 4908 /* If the constructor is empty, clear the union. */ 4909 { 4910 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL); 4911 cleared = 1; 4912 } 4913 4914 /* If we are building a static constructor into a register, 4915 set the initial value as zero so we can fold the value into 4916 a constant. But if more than one register is involved, 4917 this probably loses. */ 4918 else if (REG_P (target) && TREE_STATIC (exp) 4919 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) 4920 { 4921 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); 4922 cleared = 1; 4923 } 4924 4925 /* If the constructor has fewer fields than the structure or 4926 if we are initializing the structure to mostly zeros, clear 4927 the whole structure first. Don't do this if TARGET is a 4928 register whose mode size isn't equal to SIZE since 4929 clear_storage can't handle this case. */ 4930 else if (size > 0 4931 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp)) 4932 != fields_length (type)) 4933 || mostly_zeros_p (exp)) 4934 && (!REG_P (target) 4935 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) 4936 == size))) 4937 { 4938 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); 4939 cleared = 1; 4940 } 4941 4942 if (! cleared) 4943 emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); 4944 4945 /* Store each element of the constructor into the 4946 corresponding field of TARGET. */ 4947 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value) 4948 { 4949 enum machine_mode mode; 4950 HOST_WIDE_INT bitsize; 4951 HOST_WIDE_INT bitpos = 0; 4952 tree offset; 4953 rtx to_rtx = target; 4954 4955 /* Just ignore missing fields. We cleared the whole 4956 structure, above, if any fields are missing. 
*/ 4957 if (field == 0) 4958 continue; 4959 4960 if (cleared && initializer_zerop (value)) 4961 continue; 4962 4963 if (host_integerp (DECL_SIZE (field), 1)) 4964 bitsize = tree_low_cst (DECL_SIZE (field), 1); 4965 else 4966 bitsize = -1; 4967 4968 mode = DECL_MODE (field); 4969 if (DECL_BIT_FIELD (field)) 4970 mode = VOIDmode; 4971 4972 offset = DECL_FIELD_OFFSET (field); 4973 if (host_integerp (offset, 0) 4974 && host_integerp (bit_position (field), 0)) 4975 { 4976 bitpos = int_bit_position (field); 4977 offset = 0; 4978 } 4979 else 4980 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); 4981 4982 if (offset) 4983 { 4984 rtx offset_rtx; 4985 4986 offset 4987 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset, 4988 make_tree (TREE_TYPE (exp), 4989 target)); 4990 4991 offset_rtx = expand_normal (offset); 4992 gcc_assert (MEM_P (to_rtx)); 4993 4994#ifdef POINTERS_EXTEND_UNSIGNED 4995 if (GET_MODE (offset_rtx) != Pmode) 4996 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); 4997#else 4998 if (GET_MODE (offset_rtx) != ptr_mode) 4999 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); 5000#endif 5001 5002 to_rtx = offset_address (to_rtx, offset_rtx, 5003 highest_pow2_factor (offset)); 5004 } 5005 5006#ifdef WORD_REGISTER_OPERATIONS 5007 /* If this initializes a field that is smaller than a 5008 word, at the start of a word, try to widen it to a full 5009 word. This special case allows us to output C++ member 5010 function initializations in a form that the optimizers 5011 can understand. */ 5012 if (REG_P (target) 5013 && bitsize < BITS_PER_WORD 5014 && bitpos % BITS_PER_WORD == 0 5015 && GET_MODE_CLASS (mode) == MODE_INT 5016 && TREE_CODE (value) == INTEGER_CST 5017 && exp_size >= 0 5018 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) 5019 { 5020 tree type = TREE_TYPE (value); 5021 5022 if (TYPE_PRECISION (type) < BITS_PER_WORD) 5023 { 5024 type = lang_hooks.types.type_for_size 5025 (BITS_PER_WORD, TYPE_UNSIGNED (type)); 5026 value = fold_convert (type, value); 5027 } 5028 5029 if (BYTES_BIG_ENDIAN) 5030 value 5031 = fold_build2 (LSHIFT_EXPR, type, value, 5032 build_int_cst (type, 5033 BITS_PER_WORD - bitsize)); 5034 bitsize = BITS_PER_WORD; 5035 mode = word_mode; 5036 } 5037#endif 5038 5039 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx) 5040 && DECL_NONADDRESSABLE_P (field)) 5041 { 5042 to_rtx = copy_rtx (to_rtx); 5043 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; 5044 } 5045 5046 store_constructor_field (to_rtx, bitsize, bitpos, mode, 5047 value, type, cleared, 5048 get_alias_set (TREE_TYPE (field))); 5049 } 5050 break; 5051 } 5052 case ARRAY_TYPE: 5053 { 5054 tree value, index; 5055 unsigned HOST_WIDE_INT i; 5056 int need_to_clear; 5057 tree domain; 5058 tree elttype = TREE_TYPE (type); 5059 int const_bounds_p; 5060 HOST_WIDE_INT minelt = 0; 5061 HOST_WIDE_INT maxelt = 0; 5062 5063 domain = TYPE_DOMAIN (type); 5064 const_bounds_p = (TYPE_MIN_VALUE (domain) 5065 && TYPE_MAX_VALUE (domain) 5066 && host_integerp (TYPE_MIN_VALUE (domain), 0) 5067 && host_integerp (TYPE_MAX_VALUE (domain), 0)); 5068 5069 /* If we have constant bounds for the range of the type, get them. */ 5070 if (const_bounds_p) 5071 { 5072 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); 5073 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); 5074 } 5075 5076 /* If the constructor has fewer elements than the array, clear 5077 the whole array first. Similarly if this is static 5078 constructor of a non-BLKmode object. 
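For example, int a[8] = { 1 } supplies only one of the eight elements, so the whole array is cleared first and just a[0] is stored explicitly.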
*/ 5079 if (cleared) 5080 need_to_clear = 0; 5081 else if (REG_P (target) && TREE_STATIC (exp)) 5082 need_to_clear = 1; 5083 else 5084 { 5085 unsigned HOST_WIDE_INT idx; 5086 tree index, value; 5087 HOST_WIDE_INT count = 0, zero_count = 0; 5088 need_to_clear = ! const_bounds_p; 5089 5090 /* This loop is a more accurate version of the loop in 5091 mostly_zeros_p (it handles RANGE_EXPR in an index). It 5092 is also needed to check for missing elements. */ 5093 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value) 5094 { 5095 HOST_WIDE_INT this_node_count; 5096 5097 if (need_to_clear) 5098 break; 5099 5100 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) 5101 { 5102 tree lo_index = TREE_OPERAND (index, 0); 5103 tree hi_index = TREE_OPERAND (index, 1); 5104 5105 if (! host_integerp (lo_index, 1) 5106 || ! host_integerp (hi_index, 1)) 5107 { 5108 need_to_clear = 1; 5109 break; 5110 } 5111 5112 this_node_count = (tree_low_cst (hi_index, 1) 5113 - tree_low_cst (lo_index, 1) + 1); 5114 } 5115 else 5116 this_node_count = 1; 5117 5118 count += this_node_count; 5119 if (mostly_zeros_p (value)) 5120 zero_count += this_node_count; 5121 } 5122 5123 /* Clear the entire array first if there are any missing 5124 elements, or if the incidence of zero elements is >= 5125 75%. */ 5126 if (! need_to_clear 5127 && (count < maxelt - minelt + 1 5128 || 4 * zero_count >= 3 * count)) 5129 need_to_clear = 1; 5130 } 5131 5132 if (need_to_clear && size > 0) 5133 { 5134 if (REG_P (target)) 5135 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); 5136 else 5137 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); 5138 cleared = 1; 5139 } 5140 5141 if (!cleared && REG_P (target)) 5142 /* Inform later passes that the old value is dead. */ 5143 emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); 5144 5145 /* Store each element of the constructor into the 5146 corresponding element of TARGET, determined by counting the 5147 elements. */ 5148 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value) 5149 { 5150 enum machine_mode mode; 5151 HOST_WIDE_INT bitsize; 5152 HOST_WIDE_INT bitpos; 5153 int unsignedp; 5154 rtx xtarget = target; 5155 5156 if (cleared && initializer_zerop (value)) 5157 continue; 5158 5159 unsignedp = TYPE_UNSIGNED (elttype); 5160 mode = TYPE_MODE (elttype); 5161 if (mode == BLKmode) 5162 bitsize = (host_integerp (TYPE_SIZE (elttype), 1) 5163 ? tree_low_cst (TYPE_SIZE (elttype), 1) 5164 : -1); 5165 else 5166 bitsize = GET_MODE_BITSIZE (mode); 5167 5168 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) 5169 { 5170 tree lo_index = TREE_OPERAND (index, 0); 5171 tree hi_index = TREE_OPERAND (index, 1); 5172 rtx index_r, pos_rtx; 5173 HOST_WIDE_INT lo, hi, count; 5174 tree position; 5175 5176 /* If the range is constant and "small", unroll the loop. 
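For example, the GNU C range initializer int a[16] = { [2 ... 5] = 7 } covers four 32-bit elements, 128 bits in total, which fits the 40 * 8 bit budget tested below, so four element stores are emitted instead of a runtime loop.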
*/ 5177 if (const_bounds_p 5178 && host_integerp (lo_index, 0) 5179 && host_integerp (hi_index, 0) 5180 && (lo = tree_low_cst (lo_index, 0), 5181 hi = tree_low_cst (hi_index, 0), 5182 count = hi - lo + 1, 5183 (!MEM_P (target) 5184 || count <= 2 5185 || (host_integerp (TYPE_SIZE (elttype), 1) 5186 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count 5187 <= 40 * 8))))) 5188 { 5189 lo -= minelt; hi -= minelt; 5190 for (; lo <= hi; lo++) 5191 { 5192 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); 5193 5194 if (MEM_P (target) 5195 && !MEM_KEEP_ALIAS_SET_P (target) 5196 && TREE_CODE (type) == ARRAY_TYPE 5197 && TYPE_NONALIASED_COMPONENT (type)) 5198 { 5199 target = copy_rtx (target); 5200 MEM_KEEP_ALIAS_SET_P (target) = 1; 5201 } 5202 5203 store_constructor_field 5204 (target, bitsize, bitpos, mode, value, type, cleared, 5205 get_alias_set (elttype)); 5206 } 5207 } 5208 else 5209 { 5210 rtx loop_start = gen_label_rtx (); 5211 rtx loop_end = gen_label_rtx (); 5212 tree exit_cond; 5213 5214 expand_normal (hi_index); 5215 unsignedp = TYPE_UNSIGNED (domain); 5216 5217 index = build_decl (VAR_DECL, NULL_TREE, domain); 5218 5219 index_r 5220 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), 5221 &unsignedp, 0)); 5222 SET_DECL_RTL (index, index_r); 5223 store_expr (lo_index, index_r, 0); 5224 5225 /* Build the head of the loop. */ 5226 do_pending_stack_adjust (); 5227 emit_label (loop_start); 5228 5229 /* Assign value to element index. */ 5230 position = 5231 fold_convert (ssizetype, 5232 fold_build2 (MINUS_EXPR, 5233 TREE_TYPE (index), 5234 index, 5235 TYPE_MIN_VALUE (domain))); 5236 5237 position = 5238 size_binop (MULT_EXPR, position, 5239 fold_convert (ssizetype, 5240 TYPE_SIZE_UNIT (elttype))); 5241 5242 pos_rtx = expand_normal (position); 5243 xtarget = offset_address (target, pos_rtx, 5244 highest_pow2_factor (position)); 5245 xtarget = adjust_address (xtarget, mode, 0); 5246 if (TREE_CODE (value) == CONSTRUCTOR) 5247 store_constructor (value, xtarget, cleared, 5248 bitsize / BITS_PER_UNIT); 5249 else 5250 store_expr (value, xtarget, 0); 5251 5252 /* Generate a conditional jump to exit the loop. */ 5253 exit_cond = build2 (LT_EXPR, integer_type_node, 5254 index, hi_index); 5255 jumpif (exit_cond, loop_end); 5256 5257 /* Update the loop counter, and jump to the head of 5258 the loop. */ 5259 expand_assignment (index, 5260 build2 (PLUS_EXPR, TREE_TYPE (index), 5261 index, integer_one_node)); 5262 5263 emit_jump (loop_start); 5264 5265 /* Build the end of the loop. */ 5266 emit_label (loop_end); 5267 } 5268 } 5269 else if ((index != 0 && ! host_integerp (index, 0)) 5270 || ! 
host_integerp (TYPE_SIZE (elttype), 1)) 5271 { 5272 tree position; 5273 5274 if (index == 0) 5275 index = ssize_int (1); 5276 5277 if (minelt) 5278 index = fold_convert (ssizetype, 5279 fold_build2 (MINUS_EXPR, 5280 TREE_TYPE (index), 5281 index, 5282 TYPE_MIN_VALUE (domain))); 5283 5284 position = 5285 size_binop (MULT_EXPR, index, 5286 fold_convert (ssizetype, 5287 TYPE_SIZE_UNIT (elttype))); 5288 xtarget = offset_address (target, 5289 expand_normal (position), 5290 highest_pow2_factor (position)); 5291 xtarget = adjust_address (xtarget, mode, 0); 5292 store_expr (value, xtarget, 0); 5293 } 5294 else 5295 { 5296 if (index != 0) 5297 bitpos = ((tree_low_cst (index, 0) - minelt) 5298 * tree_low_cst (TYPE_SIZE (elttype), 1)); 5299 else 5300 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); 5301 5302 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target) 5303 && TREE_CODE (type) == ARRAY_TYPE 5304 && TYPE_NONALIASED_COMPONENT (type)) 5305 { 5306 target = copy_rtx (target); 5307 MEM_KEEP_ALIAS_SET_P (target) = 1; 5308 } 5309 store_constructor_field (target, bitsize, bitpos, mode, value, 5310 type, cleared, get_alias_set (elttype)); 5311 } 5312 } 5313 break; 5314 } 5315 5316 case VECTOR_TYPE: 5317 { 5318 unsigned HOST_WIDE_INT idx; 5319 constructor_elt *ce; 5320 int i; 5321 int need_to_clear; 5322 int icode = 0; 5323 tree elttype = TREE_TYPE (type); 5324 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1); 5325 enum machine_mode eltmode = TYPE_MODE (elttype); 5326 HOST_WIDE_INT bitsize; 5327 HOST_WIDE_INT bitpos; 5328 rtvec vector = NULL; 5329 unsigned n_elts; 5330 5331 gcc_assert (eltmode != BLKmode); 5332 5333 n_elts = TYPE_VECTOR_SUBPARTS (type); 5334 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target))) 5335 { 5336 enum machine_mode mode = GET_MODE (target); 5337 5338 icode = (int) vec_init_optab->handlers[mode].insn_code; 5339 if (icode != CODE_FOR_nothing) 5340 { 5341 unsigned int i; 5342 5343 vector = rtvec_alloc (n_elts); 5344 for (i = 0; i < n_elts; i++) 5345 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode)); 5346 } 5347 } 5348 5349 /* If the constructor has fewer elements than the vector, 5350 clear the whole array first. Similarly if this is static 5351 constructor of a non-BLKmode object. */ 5352 if (cleared) 5353 need_to_clear = 0; 5354 else if (REG_P (target) && TREE_STATIC (exp)) 5355 need_to_clear = 1; 5356 else 5357 { 5358 unsigned HOST_WIDE_INT count = 0, zero_count = 0; 5359 tree value; 5360 5361 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value) 5362 { 5363 int n_elts_here = tree_low_cst 5364 (int_const_binop (TRUNC_DIV_EXPR, 5365 TYPE_SIZE (TREE_TYPE (value)), 5366 TYPE_SIZE (elttype), 0), 1); 5367 5368 count += n_elts_here; 5369 if (mostly_zeros_p (value)) 5370 zero_count += n_elts_here; 5371 } 5372 5373 /* Clear the entire vector first if there are any missing elements, 5374 or if the incidence of zero elements is >= 75%. */ 5375 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count); 5376 } 5377 5378 if (need_to_clear && size > 0 && !vector) 5379 { 5380 if (REG_P (target)) 5381 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); 5382 else 5383 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); 5384 cleared = 1; 5385 } 5386 5387 /* Inform later passes that the old value is dead. */ 5388 if (!cleared && !vector && REG_P (target)) 5389 emit_move_insn (target, CONST0_RTX (GET_MODE (target))); 5390 5391 /* Store each element of the constructor into the corresponding 5392 element of TARGET, determined by counting the elements. 
*/ 5393 for (idx = 0, i = 0; 5394 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce); 5395 idx++, i += bitsize / elt_size) 5396 { 5397 HOST_WIDE_INT eltpos; 5398 tree value = ce->value; 5399 5400 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1); 5401 if (cleared && initializer_zerop (value)) 5402 continue; 5403 5404 if (ce->index) 5405 eltpos = tree_low_cst (ce->index, 1); 5406 else 5407 eltpos = i; 5408 5409 if (vector) 5410 { 5411 /* Vector CONSTRUCTORs should only be built from smaller 5412 vectors in the case of BLKmode vectors. */ 5413 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE); 5414 RTVEC_ELT (vector, eltpos) 5415 = expand_normal (value); 5416 } 5417 else 5418 { 5419 enum machine_mode value_mode = 5420 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE 5421 ? TYPE_MODE (TREE_TYPE (value)) 5422 : eltmode; 5423 bitpos = eltpos * elt_size; 5424 store_constructor_field (target, bitsize, bitpos, 5425 value_mode, value, type, 5426 cleared, get_alias_set (elttype)); 5427 } 5428 } 5429 5430 if (vector) 5431 emit_insn (GEN_FCN (icode) 5432 (target, 5433 gen_rtx_PARALLEL (GET_MODE (target), vector))); 5434 break; 5435 } 5436 5437 default: 5438 gcc_unreachable (); 5439 } 5440} 5441 5442/* Store the value of EXP (an expression tree) 5443 into a subfield of TARGET which has mode MODE and occupies 5444 BITSIZE bits, starting BITPOS bits from the start of TARGET. 5445 If MODE is VOIDmode, it means that we are storing into a bit-field. 5446 5447 Always return const0_rtx unless we have something particular to 5448 return. 5449 5450 TYPE is the type of the underlying object, 5451 5452 ALIAS_SET is the alias set for the destination. This value will 5453 (in general) be different from that for TARGET, since TARGET is a 5454 reference to the containing structure. */ 5455 5456static rtx 5457store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, 5458 enum machine_mode mode, tree exp, tree type, int alias_set) 5459{ 5460 HOST_WIDE_INT width_mask = 0; 5461 5462 if (TREE_CODE (exp) == ERROR_MARK) 5463 return const0_rtx; 5464 5465 /* If we have nothing to store, do nothing unless the expression has 5466 side-effects. */ 5467 if (bitsize == 0) 5468 return expand_expr (exp, const0_rtx, VOIDmode, 0); 5469 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT) 5470 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1; 5471 5472 /* If we are storing into an unaligned field of an aligned union that is 5473 in a register, we may have the mode of TARGET being an integer mode but 5474 MODE == BLKmode. In that case, get an aligned object whose size and 5475 alignment are the same as TARGET and store TARGET into it (we can avoid 5476 the store if the field being stored is the entire width of TARGET). Then 5477 call ourselves recursively to store the field into a BLKmode version of 5478 that object. Finally, load from the object into TARGET. This is not 5479 very efficient in general, but should only be slightly more expensive 5480 than the otherwise-required unaligned accesses. Perhaps this can be 5481 cleaned up later. It's tempting to make OBJECT readonly, but it's set 5482 twice, once with emit_move_insn and once via store_field. 
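For instance, storing into a BLKmode field of a union that lives in an integer register follows this path: spill to a stack temporary, recurse on the temporary's BLKmode view, then load the register back from it.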
*/ 5483 5484 if (mode == BLKmode 5485 && (REG_P (target) || GET_CODE (target) == SUBREG)) 5486 { 5487 rtx object = assign_temp (type, 0, 1, 1); 5488 rtx blk_object = adjust_address (object, BLKmode, 0); 5489 5490 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) 5491 emit_move_insn (object, target); 5492 5493 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set); 5494 5495 emit_move_insn (target, object); 5496 5497 /* We want to return the BLKmode version of the data. */ 5498 return blk_object; 5499 } 5500 5501 if (GET_CODE (target) == CONCAT) 5502 { 5503 /* We're storing into a struct containing a single __complex. */ 5504 5505 gcc_assert (!bitpos); 5506 return store_expr (exp, target, 0); 5507 } 5508 5509 /* If the structure is in a register or if the component 5510 is a bit field, we cannot use addressing to access it. 5511 Use bit-field techniques or SUBREG to store in it. */ 5512 5513 if (mode == VOIDmode 5514 || (mode != BLKmode && ! direct_store[(int) mode] 5515 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT 5516 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) 5517 || REG_P (target) 5518 || GET_CODE (target) == SUBREG 5519 /* If the field isn't aligned enough to store as an ordinary memref, 5520 store it as a bit field. */ 5521 || (mode != BLKmode 5522 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)) 5523 || bitpos % GET_MODE_ALIGNMENT (mode)) 5524 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))) 5525 || (bitpos % BITS_PER_UNIT != 0))) 5526 /* If the RHS and field are a constant size and the size of the 5527 RHS isn't the same size as the bitfield, we must use bitfield 5528 operations. */ 5529 || (bitsize >= 0 5530 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST 5531 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) 5532 { 5533 rtx temp; 5534 5535 /* If EXP is a NOP_EXPR of precision less than its mode, then that 5536 implies a mask operation. If the precision is the same size as 5537 the field we're storing into, that mask is redundant. This is 5538 particularly common with bit field assignments generated by the 5539 C front end. */ 5540 if (TREE_CODE (exp) == NOP_EXPR) 5541 { 5542 tree type = TREE_TYPE (exp); 5543 if (INTEGRAL_TYPE_P (type) 5544 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type)) 5545 && bitsize == TYPE_PRECISION (type)) 5546 { 5547 type = TREE_TYPE (TREE_OPERAND (exp, 0)); 5548 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize) 5549 exp = TREE_OPERAND (exp, 0); 5550 } 5551 } 5552 5553 temp = expand_normal (exp); 5554 5555 /* If BITSIZE is narrower than the size of the type of EXP 5556 we will be narrowing TEMP. Normally, what's wanted are the 5557 low-order bits. However, if EXP's type is a record and this is 5558 big-endian machine, we want the upper BITSIZE bits. */ 5559 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT 5560 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp)) 5561 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) 5562 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, 5563 size_int (GET_MODE_BITSIZE (GET_MODE (temp)) 5564 - bitsize), 5565 NULL_RTX, 1); 5566 5567 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to 5568 MODE. 
*/ 5569 if (mode != VOIDmode && mode != BLKmode 5570 && mode != TYPE_MODE (TREE_TYPE (exp))) 5571 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1); 5572 5573 /* If the modes of TARGET and TEMP are both BLKmode, both 5574 must be in memory and BITPOS must be aligned on a byte 5575 boundary. If so, we simply do a block copy. */ 5576 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) 5577 { 5578 gcc_assert (MEM_P (target) && MEM_P (temp) 5579 && !(bitpos % BITS_PER_UNIT)); 5580 5581 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); 5582 emit_block_move (target, temp, 5583 GEN_INT ((bitsize + BITS_PER_UNIT - 1) 5584 / BITS_PER_UNIT), 5585 BLOCK_OP_NORMAL); 5586 5587 return const0_rtx; 5588 } 5589 5590 /* Store the value in the bitfield. */ 5591 store_bit_field (target, bitsize, bitpos, mode, temp); 5592 5593 return const0_rtx; 5594 } 5595 else 5596 { 5597 /* Now build a reference to just the desired component. */ 5598 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); 5599 5600 if (to_rtx == target) 5601 to_rtx = copy_rtx (to_rtx); 5602 5603 MEM_SET_IN_STRUCT_P (to_rtx, 1); 5604 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) 5605 set_mem_alias_set (to_rtx, alias_set); 5606 5607 return store_expr (exp, to_rtx, 0); 5608 } 5609} 5610 5611/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, 5612 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these 5613 codes and find the ultimate containing object, which we return. 5614 5615 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the 5616 bit position, and *PUNSIGNEDP to the signedness of the field. 5617 If the position of the field is variable, we store a tree 5618 giving the variable offset (in units) in *POFFSET. 5619 This offset is in addition to the bit position. 5620 If the position is not variable, we store 0 in *POFFSET. 5621 5622 If any of the extraction expressions is volatile, 5623 we store 1 in *PVOLATILEP. Otherwise we don't change that. 5624 5625 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it 5626 is a mode that can be used to access the field. In that case, *PBITSIZE 5627 is redundant. 5628 5629 If the field describes a variable-sized object, *PMODE is set to 5630 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in 5631 this case, but the address of the object can be found. 5632 5633 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't 5634 look through nodes that serve as markers of a greater alignment than 5635 the one that can be deduced from the expression. These nodes make it 5636 possible for front-ends to prevent temporaries from being created by 5637 the middle-end on alignment considerations. For that purpose, the 5638 normal operating mode at high-level is to always pass FALSE so that 5639 the ultimate containing object is really returned; moreover, the 5640 associated predicate handled_component_p will always return TRUE 5641 on these nodes, thus indicating that they are essentially handled 5642 by get_inner_reference. TRUE should only be passed when the caller 5643 is scanning the expression in order to build another representation 5644 and specifically knows how to handle these nodes; as such, this is 5645 the normal operating mode in the RTL expanders. 
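 A worked example (illustrative; the exact numbers depend on the target's layout rules): for struct s { int a; unsigned b : 3; unsigned c : 5; } x; a reference to x.c on a typical ILP32 target yields *PBITSIZE = 5, *PBITPOS = 35, *POFFSET = 0 (the position is constant), *PMODE = VOIDmode (it is a bit-field) and *PUNSIGNEDP = 1, and the VAR_DECL for x is returned as the ultimate containing object.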
*/ 5646 5647tree 5648get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, 5649 HOST_WIDE_INT *pbitpos, tree *poffset, 5650 enum machine_mode *pmode, int *punsignedp, 5651 int *pvolatilep, bool keep_aligning) 5652{ 5653 tree size_tree = 0; 5654 enum machine_mode mode = VOIDmode; 5655 tree offset = size_zero_node; 5656 tree bit_offset = bitsize_zero_node; 5657 5658 /* First get the mode, signedness, and size. We do this from just the 5659 outermost expression. */ 5660 if (TREE_CODE (exp) == COMPONENT_REF) 5661 { 5662 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); 5663 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) 5664 mode = DECL_MODE (TREE_OPERAND (exp, 1)); 5665 5666 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1)); 5667 } 5668 else if (TREE_CODE (exp) == BIT_FIELD_REF) 5669 { 5670 size_tree = TREE_OPERAND (exp, 1); 5671 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp); 5672 } 5673 else 5674 { 5675 mode = TYPE_MODE (TREE_TYPE (exp)); 5676 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); 5677 5678 if (mode == BLKmode) 5679 size_tree = TYPE_SIZE (TREE_TYPE (exp)); 5680 else 5681 *pbitsize = GET_MODE_BITSIZE (mode); 5682 } 5683 5684 if (size_tree != 0) 5685 { 5686 if (! host_integerp (size_tree, 1)) 5687 mode = BLKmode, *pbitsize = -1; 5688 else 5689 *pbitsize = tree_low_cst (size_tree, 1); 5690 } 5691 5692 *pmode = mode; 5693 5694 /* Compute cumulative bit-offset for nested component-refs and array-refs, 5695 and find the ultimate containing object. */ 5696 while (1) 5697 { 5698 switch (TREE_CODE (exp)) 5699 { 5700 case BIT_FIELD_REF: 5701 bit_offset = size_binop (PLUS_EXPR, bit_offset, 5702 TREE_OPERAND (exp, 2)); 5703 break; 5704 5705 case COMPONENT_REF: 5706 { 5707 tree field = TREE_OPERAND (exp, 1); 5708 tree this_offset = component_ref_field_offset (exp); 5709 5710 /* If this field hasn't been filled in yet, don't go past it. 5711 This should only happen when folding expressions made during 5712 type construction. */ 5713 if (this_offset == 0) 5714 break; 5715 5716 offset = size_binop (PLUS_EXPR, offset, this_offset); 5717 bit_offset = size_binop (PLUS_EXPR, bit_offset, 5718 DECL_FIELD_BIT_OFFSET (field)); 5719 5720 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ 5721 } 5722 break; 5723 5724 case ARRAY_REF: 5725 case ARRAY_RANGE_REF: 5726 { 5727 tree index = TREE_OPERAND (exp, 1); 5728 tree low_bound = array_ref_low_bound (exp); 5729 tree unit_size = array_ref_element_size (exp); 5730 5731 /* We assume all arrays have sizes that are a multiple of a byte. 5732 First subtract the lower bound, if any, in the type of the 5733 index, then convert to sizetype and multiply by the size of 5734 the array element. */ 5735 if (! 
integer_zerop (low_bound)) 5736 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), 5737 index, low_bound); 5738 5739 offset = size_binop (PLUS_EXPR, offset, 5740 size_binop (MULT_EXPR, 5741 fold_convert (sizetype, index), 5742 unit_size)); 5743 } 5744 break; 5745 5746 case REALPART_EXPR: 5747 break; 5748 5749 case IMAGPART_EXPR: 5750 bit_offset = size_binop (PLUS_EXPR, bit_offset, 5751 bitsize_int (*pbitsize)); 5752 break; 5753 5754 case VIEW_CONVERT_EXPR: 5755 if (keep_aligning && STRICT_ALIGNMENT 5756 && (TYPE_ALIGN (TREE_TYPE (exp)) 5757 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))) 5758 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) 5759 < BIGGEST_ALIGNMENT) 5760 && (TYPE_ALIGN_OK (TREE_TYPE (exp)) 5761 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0))))) 5762 goto done; 5763 break; 5764 5765 default: 5766 goto done; 5767 } 5768 5769 /* If any reference in the chain is volatile, the effect is volatile. */ 5770 if (TREE_THIS_VOLATILE (exp)) 5771 *pvolatilep = 1; 5772 5773 exp = TREE_OPERAND (exp, 0); 5774 } 5775 done: 5776 5777 /* If OFFSET is constant, see if we can return the whole thing as a 5778 constant bit position. Make sure to handle overflow during 5779 this conversion. */ 5780 if (host_integerp (offset, 0)) 5781 { 5782 double_int tem = double_int_mul (tree_to_double_int (offset), 5783 uhwi_to_double_int (BITS_PER_UNIT)); 5784 tem = double_int_add (tem, tree_to_double_int (bit_offset)); 5785 if (double_int_fits_in_shwi_p (tem)) 5786 { 5787 *pbitpos = double_int_to_shwi (tem); 5788 *poffset = NULL_TREE; 5789 return exp; 5790 } 5791 } 5792 5793 /* Otherwise, split it up. */ 5794 *pbitpos = tree_low_cst (bit_offset, 0); 5795 *poffset = offset; 5796 5797 return exp; 5798} 5799 5800/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF, 5801 look for whether EXP or any nested component-refs within EXP is marked 5802 as PACKED. */ 5803 5804bool 5805contains_packed_reference (tree exp) 5806{ 5807 bool packed_p = false; 5808 5809 while (1) 5810 { 5811 switch (TREE_CODE (exp)) 5812 { 5813 case COMPONENT_REF: 5814 { 5815 tree field = TREE_OPERAND (exp, 1); 5816 packed_p = DECL_PACKED (field) 5817 || TYPE_PACKED (TREE_TYPE (field)) 5818 || TYPE_PACKED (TREE_TYPE (exp)); 5819 if (packed_p) 5820 goto done; 5821 } 5822 break; 5823 5824 case BIT_FIELD_REF: 5825 case ARRAY_REF: 5826 case ARRAY_RANGE_REF: 5827 case REALPART_EXPR: 5828 case IMAGPART_EXPR: 5829 case VIEW_CONVERT_EXPR: 5830 break; 5831 5832 default: 5833 goto done; 5834 } 5835 exp = TREE_OPERAND (exp, 0); 5836 } 5837 done: 5838 return packed_p; 5839} 5840 5841/* Return a tree of sizetype representing the size, in bytes, of the element 5842 of EXP, an ARRAY_REF. */ 5843 5844tree 5845array_ref_element_size (tree exp) 5846{ 5847 tree aligned_size = TREE_OPERAND (exp, 3); 5848 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))); 5849 5850 /* If a size was specified in the ARRAY_REF, it's the size measured 5851 in alignment units of the element type. So multiply by that value. */ 5852 if (aligned_size) 5853 { 5854 /* ??? tree_ssa_useless_type_conversion will eliminate casts to 5855 sizetype from another type of the same width and signedness. */ 5856 if (TREE_TYPE (aligned_size) != sizetype) 5857 aligned_size = fold_convert (sizetype, aligned_size); 5858 return size_binop (MULT_EXPR, aligned_size, 5859 size_int (TYPE_ALIGN_UNIT (elmt_type))); 5860 } 5861 5862 /* Otherwise, take the size from that of the element type. Substitute 5863 any PLACEHOLDER_EXPR that we have. 
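 (For a fixed-size element type this reduces to the type's TYPE_SIZE_UNIT; an ARRAY_REF into an array of ints, for example, yields a sizetype constant of 4 on a typical 32-bit target.)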
*/ 5864 else 5865 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp); 5866} 5867 5868/* Return a tree representing the lower bound of the array mentioned in 5869 EXP, an ARRAY_REF. */ 5870 5871tree 5872array_ref_low_bound (tree exp) 5873{ 5874 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); 5875 5876 /* If a lower bound is specified in EXP, use it. */ 5877 if (TREE_OPERAND (exp, 2)) 5878 return TREE_OPERAND (exp, 2); 5879 5880 /* Otherwise, if there is a domain type and it has a lower bound, use it, 5881 substituting for a PLACEHOLDER_EXPR as needed. */ 5882 if (domain_type && TYPE_MIN_VALUE (domain_type)) 5883 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp); 5884 5885 /* Otherwise, return a zero of the appropriate type. */ 5886 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0); 5887} 5888 5889/* Return a tree representing the upper bound of the array mentioned in 5890 EXP, an ARRAY_REF. */ 5891 5892tree 5893array_ref_up_bound (tree exp) 5894{ 5895 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); 5896 5897 /* If there is a domain type and it has an upper bound, use it, substituting 5898 for a PLACEHOLDER_EXPR as needed. */ 5899 if (domain_type && TYPE_MAX_VALUE (domain_type)) 5900 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp); 5901 5902 /* Otherwise fail. */ 5903 return NULL_TREE; 5904} 5905 5906/* Return a tree representing the offset, in bytes, of the field referenced 5907 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */ 5908 5909tree 5910component_ref_field_offset (tree exp) 5911{ 5912 tree aligned_offset = TREE_OPERAND (exp, 2); 5913 tree field = TREE_OPERAND (exp, 1); 5914 5915 /* If an offset was specified in the COMPONENT_REF, it's the offset measured 5916 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that 5917 value. */ 5918 if (aligned_offset) 5919 { 5920 /* ??? tree_ssa_useless_type_conversion will eliminate casts to 5921 sizetype from another type of the same width and signedness. */ 5922 if (TREE_TYPE (aligned_offset) != sizetype) 5923 aligned_offset = fold_convert (sizetype, aligned_offset); 5924 return size_binop (MULT_EXPR, aligned_offset, 5925 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT)); 5926 } 5927 5928 /* Otherwise, take the offset from that of the field. Substitute 5929 any PLACEHOLDER_EXPR that we have. */ 5930 else 5931 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp); 5932} 5933 5934/* Return 1 if T is an expression that get_inner_reference handles. */ 5935 5936int 5937handled_component_p (tree t) 5938{ 5939 switch (TREE_CODE (t)) 5940 { 5941 case BIT_FIELD_REF: 5942 case COMPONENT_REF: 5943 case ARRAY_REF: 5944 case ARRAY_RANGE_REF: 5945 case VIEW_CONVERT_EXPR: 5946 case REALPART_EXPR: 5947 case IMAGPART_EXPR: 5948 return 1; 5949 5950 default: 5951 return 0; 5952 } 5953} 5954 5955/* Given an rtx VALUE that may contain additions and multiplications, return 5956 an equivalent value that just refers to a register, memory, or constant. 5957 This is done by generating instructions to perform the arithmetic and 5958 returning a pseudo-register containing the value. 5959 5960 The returned value may be a REG, SUBREG, MEM or constant. */ 5961 5962rtx 5963force_operand (rtx value, rtx target) 5964{ 5965 rtx op1, op2; 5966 /* Use subtarget as the target for operand 0 of a binary operation. 
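 (Illustrative example: for VALUE = (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61)), the MULT operand is expanded first, into SUBTARGET when that is safe, and the addition is then emitted into TARGET, so the caller gets back a single pseudo holding the sum.)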
*/ 5967 rtx subtarget = get_subtarget (target); 5968 enum rtx_code code = GET_CODE (value); 5969 5970 /* Check for subreg applied to an expression produced by loop optimizer. */ 5971 if (code == SUBREG 5972 && !REG_P (SUBREG_REG (value)) 5973 && !MEM_P (SUBREG_REG (value))) 5974 { 5975 value = simplify_gen_subreg (GET_MODE (value), 5976 force_reg (GET_MODE (SUBREG_REG (value)), 5977 force_operand (SUBREG_REG (value), 5978 NULL_RTX)), 5979 GET_MODE (SUBREG_REG (value)), 5980 SUBREG_BYTE (value)); 5981 code = GET_CODE (value); 5982 } 5983 5984 /* Check for a PIC address load. */ 5985 if ((code == PLUS || code == MINUS) 5986 && XEXP (value, 0) == pic_offset_table_rtx 5987 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF 5988 || GET_CODE (XEXP (value, 1)) == LABEL_REF 5989 || GET_CODE (XEXP (value, 1)) == CONST)) 5990 { 5991 if (!subtarget) 5992 subtarget = gen_reg_rtx (GET_MODE (value)); 5993 emit_move_insn (subtarget, value); 5994 return subtarget; 5995 } 5996 5997 if (ARITHMETIC_P (value)) 5998 { 5999 op2 = XEXP (value, 1); 6000 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget)) 6001 subtarget = 0; 6002 if (code == MINUS && GET_CODE (op2) == CONST_INT) 6003 { 6004 code = PLUS; 6005 op2 = negate_rtx (GET_MODE (value), op2); 6006 } 6007 6008 /* Check for an addition with OP2 a constant integer and our first 6009 operand a PLUS of a virtual register and something else. In that 6010 case, we want to emit the sum of the virtual register and the 6011 constant first and then add the other value. This allows virtual 6012 register instantiation to simply modify the constant rather than 6013 creating another one around this addition. */ 6014 if (code == PLUS && GET_CODE (op2) == CONST_INT 6015 && GET_CODE (XEXP (value, 0)) == PLUS 6016 && REG_P (XEXP (XEXP (value, 0), 0)) 6017 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER 6018 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER) 6019 { 6020 rtx temp = expand_simple_binop (GET_MODE (value), code, 6021 XEXP (XEXP (value, 0), 0), op2, 6022 subtarget, 0, OPTAB_LIB_WIDEN); 6023 return expand_simple_binop (GET_MODE (value), code, temp, 6024 force_operand (XEXP (XEXP (value, 6025 0), 1), 0), 6026 target, 0, OPTAB_LIB_WIDEN); 6027 } 6028 6029 op1 = force_operand (XEXP (value, 0), subtarget); 6030 op2 = force_operand (op2, NULL_RTX); 6031 switch (code) 6032 { 6033 case MULT: 6034 return expand_mult (GET_MODE (value), op1, op2, target, 1); 6035 case DIV: 6036 if (!INTEGRAL_MODE_P (GET_MODE (value))) 6037 return expand_simple_binop (GET_MODE (value), code, op1, op2, 6038 target, 1, OPTAB_LIB_WIDEN); 6039 else 6040 return expand_divmod (0, 6041 FLOAT_MODE_P (GET_MODE (value)) 6042 ? 
RDIV_EXPR : TRUNC_DIV_EXPR, 6043 GET_MODE (value), op1, op2, target, 0); 6044 break; 6045 case MOD: 6046 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, 6047 target, 0); 6048 break; 6049 case UDIV: 6050 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2, 6051 target, 1); 6052 break; 6053 case UMOD: 6054 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, 6055 target, 1); 6056 break; 6057 case ASHIFTRT: 6058 return expand_simple_binop (GET_MODE (value), code, op1, op2, 6059 target, 0, OPTAB_LIB_WIDEN); 6060 break; 6061 default: 6062 return expand_simple_binop (GET_MODE (value), code, op1, op2, 6063 target, 1, OPTAB_LIB_WIDEN); 6064 } 6065 } 6066 if (UNARY_P (value)) 6067 { 6068 if (!target) 6069 target = gen_reg_rtx (GET_MODE (value)); 6070 op1 = force_operand (XEXP (value, 0), NULL_RTX); 6071 switch (code) 6072 { 6073 case ZERO_EXTEND: 6074 case SIGN_EXTEND: 6075 case TRUNCATE: 6076 case FLOAT_EXTEND: 6077 case FLOAT_TRUNCATE: 6078 convert_move (target, op1, code == ZERO_EXTEND); 6079 return target; 6080 6081 case FIX: 6082 case UNSIGNED_FIX: 6083 expand_fix (target, op1, code == UNSIGNED_FIX); 6084 return target; 6085 6086 case FLOAT: 6087 case UNSIGNED_FLOAT: 6088 expand_float (target, op1, code == UNSIGNED_FLOAT); 6089 return target; 6090 6091 default: 6092 return expand_simple_unop (GET_MODE (value), code, op1, target, 0); 6093 } 6094 } 6095 6096#ifdef INSN_SCHEDULING 6097 /* On machines that have insn scheduling, we want all memory reference to be 6098 explicit, so we need to deal with such paradoxical SUBREGs. */ 6099 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value)) 6100 && (GET_MODE_SIZE (GET_MODE (value)) 6101 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value))))) 6102 value 6103 = simplify_gen_subreg (GET_MODE (value), 6104 force_reg (GET_MODE (SUBREG_REG (value)), 6105 force_operand (SUBREG_REG (value), 6106 NULL_RTX)), 6107 GET_MODE (SUBREG_REG (value)), 6108 SUBREG_BYTE (value)); 6109#endif 6110 6111 return value; 6112} 6113 6114/* Subroutine of expand_expr: return nonzero iff there is no way that 6115 EXP can reference X, which is being modified. TOP_P is nonzero if this 6116 call is going to be used to determine whether we need a temporary 6117 for EXP, as opposed to a recursive call to this function. 6118 6119 It is always safe for this routine to return zero since it merely 6120 searches for optimization opportunities. */ 6121 6122int 6123safe_from_p (rtx x, tree exp, int top_p) 6124{ 6125 rtx exp_rtl = 0; 6126 int i, nops; 6127 6128 if (x == 0 6129 /* If EXP has varying size, we MUST use a target since we currently 6130 have no way of allocating temporaries of variable size 6131 (except for arrays that have TYPE_ARRAY_MAX_SIZE set). 6132 So we assume here that something at a higher level has prevented a 6133 clash. This is somewhat bogus, but the best we can do. Only 6134 do this when X is BLKmode and when we are at the top level. */ 6135 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) 6136 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST 6137 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE 6138 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE 6139 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp))) 6140 != INTEGER_CST) 6141 && GET_MODE (x) == BLKmode) 6142 /* If X is in the outgoing argument area, it is always safe. 
*/ 6143 || (MEM_P (x) 6144 && (XEXP (x, 0) == virtual_outgoing_args_rtx 6145 || (GET_CODE (XEXP (x, 0)) == PLUS 6146 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) 6147 return 1; 6148 6149 /* If this is a subreg of a hard register, declare it unsafe, otherwise, 6150 find the underlying pseudo. */ 6151 if (GET_CODE (x) == SUBREG) 6152 { 6153 x = SUBREG_REG (x); 6154 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) 6155 return 0; 6156 } 6157 6158 /* Now look at our tree code and possibly recurse. */ 6159 switch (TREE_CODE_CLASS (TREE_CODE (exp))) 6160 { 6161 case tcc_declaration: 6162 exp_rtl = DECL_RTL_IF_SET (exp); 6163 break; 6164 6165 case tcc_constant: 6166 return 1; 6167 6168 case tcc_exceptional: 6169 if (TREE_CODE (exp) == TREE_LIST) 6170 { 6171 while (1) 6172 { 6173 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0)) 6174 return 0; 6175 exp = TREE_CHAIN (exp); 6176 if (!exp) 6177 return 1; 6178 if (TREE_CODE (exp) != TREE_LIST) 6179 return safe_from_p (x, exp, 0); 6180 } 6181 } 6182 else if (TREE_CODE (exp) == CONSTRUCTOR) 6183 { 6184 constructor_elt *ce; 6185 unsigned HOST_WIDE_INT idx; 6186 6187 for (idx = 0; 6188 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce); 6189 idx++) 6190 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0)) 6191 || !safe_from_p (x, ce->value, 0)) 6192 return 0; 6193 return 1; 6194 } 6195 else if (TREE_CODE (exp) == ERROR_MARK) 6196 return 1; /* An already-visited SAVE_EXPR? */ 6197 else 6198 return 0; 6199 6200 case tcc_statement: 6201 /* The only case we look at here is the DECL_INITIAL inside a 6202 DECL_EXPR. */ 6203 return (TREE_CODE (exp) != DECL_EXPR 6204 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL 6205 || !DECL_INITIAL (DECL_EXPR_DECL (exp)) 6206 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0)); 6207 6208 case tcc_binary: 6209 case tcc_comparison: 6210 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0)) 6211 return 0; 6212 /* Fall through. */ 6213 6214 case tcc_unary: 6215 return safe_from_p (x, TREE_OPERAND (exp, 0), 0); 6216 6217 case tcc_expression: 6218 case tcc_reference: 6219 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in 6220 the expression. If it is set, we conflict iff we are that rtx or 6221 both are in memory. Otherwise, we check all operands of the 6222 expression recursively. */ 6223 6224 switch (TREE_CODE (exp)) 6225 { 6226 case ADDR_EXPR: 6227 /* If the operand is static or we are static, we can't conflict. 6228 Likewise if we don't conflict with the operand at all. */ 6229 if (staticp (TREE_OPERAND (exp, 0)) 6230 || TREE_STATIC (exp) 6231 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) 6232 return 1; 6233 6234 /* Otherwise, the only way this can conflict is if we are taking 6235 the address of a DECL and that address is part of X, which is 6236 very rare. */ 6237 exp = TREE_OPERAND (exp, 0); 6238 if (DECL_P (exp)) 6239 { 6240 if (!DECL_RTL_SET_P (exp) 6241 || !MEM_P (DECL_RTL (exp))) 6242 return 0; 6243 else 6244 exp_rtl = XEXP (DECL_RTL (exp), 0); 6245 } 6246 break; 6247 6248 case MISALIGNED_INDIRECT_REF: 6249 case ALIGN_INDIRECT_REF: 6250 case INDIRECT_REF: 6251 if (MEM_P (x) 6252 && alias_sets_conflict_p (MEM_ALIAS_SET (x), 6253 get_alias_set (exp))) 6254 return 0; 6255 break; 6256 6257 case CALL_EXPR: 6258 /* Assume that the call will clobber all hard registers and 6259 all of memory.
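 (Consequently a pseudo register X is still considered safe across the call; only a MEM or a hard register X is rejected by the test below.)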
*/ 6260 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) 6261 || MEM_P (x)) 6262 return 0; 6263 break; 6264 6265 case WITH_CLEANUP_EXPR: 6266 case CLEANUP_POINT_EXPR: 6267 /* Lowered by gimplify.c. */ 6268 gcc_unreachable (); 6269 6270 case SAVE_EXPR: 6271 return safe_from_p (x, TREE_OPERAND (exp, 0), 0); 6272 6273 default: 6274 break; 6275 } 6276 6277 /* If we have an rtx, we do not need to scan our operands. */ 6278 if (exp_rtl) 6279 break; 6280 6281 nops = TREE_CODE_LENGTH (TREE_CODE (exp)); 6282 for (i = 0; i < nops; i++) 6283 if (TREE_OPERAND (exp, i) != 0 6284 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) 6285 return 0; 6286 6287 /* If this is a language-specific tree code, it may require 6288 special handling. */ 6289 if ((unsigned int) TREE_CODE (exp) 6290 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE 6291 && !lang_hooks.safe_from_p (x, exp)) 6292 return 0; 6293 break; 6294 6295 case tcc_type: 6296 /* Should never get a type here. */ 6297 gcc_unreachable (); 6298 } 6299 6300 /* If we have an rtl, find any enclosed object. Then see if we conflict 6301 with it. */ 6302 if (exp_rtl) 6303 { 6304 if (GET_CODE (exp_rtl) == SUBREG) 6305 { 6306 exp_rtl = SUBREG_REG (exp_rtl); 6307 if (REG_P (exp_rtl) 6308 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER) 6309 return 0; 6310 } 6311 6312 /* If the rtl is X, then it is not safe. Otherwise, it is unless both 6313 are memory and they conflict. */ 6314 return ! (rtx_equal_p (x, exp_rtl) 6315 || (MEM_P (x) && MEM_P (exp_rtl) 6316 && true_dependence (exp_rtl, VOIDmode, x, 6317 rtx_addr_varies_p))); 6318 } 6319 6320 /* If we reach here, it is safe. */ 6321 return 1; 6322} 6323 6324 6325/* Return the highest power of two that EXP is known to be a multiple of. 6326 This is used in updating alignment of MEMs in array references. */ 6327 6328unsigned HOST_WIDE_INT 6329highest_pow2_factor (tree exp) 6330{ 6331 unsigned HOST_WIDE_INT c0, c1; 6332 6333 switch (TREE_CODE (exp)) 6334 { 6335 case INTEGER_CST: 6336 /* We can find the lowest bit that's a one. If the low 6337 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT. 6338 We need to handle this case since we can find it in a COND_EXPR, 6339 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an 6340 erroneous program, so return BIGGEST_ALIGNMENT to avoid any 6341 later ICE. */ 6342 if (TREE_CONSTANT_OVERFLOW (exp)) 6343 return BIGGEST_ALIGNMENT; 6344 else 6345 { 6346 /* Note: tree_low_cst is intentionally not used here, 6347 we don't care about the upper bits. */ 6348 c0 = TREE_INT_CST_LOW (exp); 6349 c0 &= -c0; 6350 return c0 ? 
c0 : BIGGEST_ALIGNMENT; 6351 } 6352 break; 6353 6354 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR: 6355 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); 6356 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); 6357 return MIN (c0, c1); 6358 6359 case MULT_EXPR: 6360 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); 6361 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); 6362 return c0 * c1; 6363 6364 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: 6365 case CEIL_DIV_EXPR: 6366 if (integer_pow2p (TREE_OPERAND (exp, 1)) 6367 && host_integerp (TREE_OPERAND (exp, 1), 1)) 6368 { 6369 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); 6370 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1); 6371 return MAX (1, c0 / c1); 6372 } 6373 break; 6374 6375 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR: 6376 case SAVE_EXPR: 6377 return highest_pow2_factor (TREE_OPERAND (exp, 0)); 6378 6379 case COMPOUND_EXPR: 6380 return highest_pow2_factor (TREE_OPERAND (exp, 1)); 6381 6382 case COND_EXPR: 6383 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1)); 6384 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2)); 6385 return MIN (c0, c1); 6386 6387 default: 6388 break; 6389 } 6390 6391 return 1; 6392} 6393 6394/* Similar, except that the alignment requirements of TARGET are 6395 taken into account. Assume it is at least as aligned as its 6396 type, unless it is a COMPONENT_REF in which case the layout of 6397 the structure gives the alignment. */ 6398 6399static unsigned HOST_WIDE_INT 6400highest_pow2_factor_for_target (tree target, tree exp) 6401{ 6402 unsigned HOST_WIDE_INT target_align, factor; 6403 6404 factor = highest_pow2_factor (exp); 6405 if (TREE_CODE (target) == COMPONENT_REF) 6406 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1)); 6407 else 6408 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target)); 6409 return MAX (factor, target_align); 6410} 6411 6412/* Expands variable VAR. */ 6413 6414void 6415expand_var (tree var) 6416{ 6417 if (DECL_EXTERNAL (var)) 6418 return; 6419 6420 if (TREE_STATIC (var)) 6421 /* If this is an inlined copy of a static local variable, 6422 look up the original decl. */ 6423 var = DECL_ORIGIN (var); 6424 6425 if (TREE_STATIC (var) 6426 ? !TREE_ASM_WRITTEN (var) 6427 : !DECL_RTL_SET_P (var)) 6428 { 6429 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var)) 6430 /* Should be ignored. */; 6431 else if (lang_hooks.expand_decl (var)) 6432 /* OK. */; 6433 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var)) 6434 expand_decl (var); 6435 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var)) 6436 rest_of_decl_compilation (var, 0, 0); 6437 else 6438 /* No expansion needed. */ 6439 gcc_assert (TREE_CODE (var) == TYPE_DECL 6440 || TREE_CODE (var) == CONST_DECL 6441 || TREE_CODE (var) == FUNCTION_DECL 6442 || TREE_CODE (var) == LABEL_DECL); 6443 } 6444} 6445 6446/* Subroutine of expand_expr. Expand the two operands of a binary 6447 expression EXP0 and EXP1 placing the results in OP0 and OP1. 6448 The value may be stored in TARGET if TARGET is nonzero. The 6449 MODIFIER argument is as documented by expand_expr. */ 6450 6451static void 6452expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1, 6453 enum expand_modifier modifier) 6454{ 6455 if (! 
safe_from_p (target, exp1, 1)) 6456 target = 0; 6457 if (operand_equal_p (exp0, exp1, 0)) 6458 { 6459 *op0 = expand_expr (exp0, target, VOIDmode, modifier); 6460 *op1 = copy_rtx (*op0); 6461 } 6462 else 6463 { 6464 /* If we need to preserve evaluation order, copy exp0 into its own 6465 temporary variable so that it can't be clobbered by exp1. */ 6466 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1)) 6467 exp0 = save_expr (exp0); 6468 *op0 = expand_expr (exp0, target, VOIDmode, modifier); 6469 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier); 6470 } 6471} 6472 6473 6474/* Return a MEM that contains constant EXP. DEFER is as for 6475 output_constant_def and MODIFIER is as for expand_expr. */ 6476 6477static rtx 6478expand_expr_constant (tree exp, int defer, enum expand_modifier modifier) 6479{ 6480 rtx mem; 6481 6482 mem = output_constant_def (exp, defer); 6483 if (modifier != EXPAND_INITIALIZER) 6484 mem = use_anchored_address (mem); 6485 return mem; 6486} 6487 6488/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP. 6489 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */ 6490 6491static rtx 6492expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode, 6493 enum expand_modifier modifier) 6494{ 6495 rtx result, subtarget; 6496 tree inner, offset; 6497 HOST_WIDE_INT bitsize, bitpos; 6498 int volatilep, unsignedp; 6499 enum machine_mode mode1; 6500 6501 /* If we are taking the address of a constant and are at the top level, 6502 we have to use output_constant_def since we can't call force_const_mem 6503 at top level. */ 6504 /* ??? This should be considered a front-end bug. We should not be 6505 generating ADDR_EXPR of something that isn't an LVALUE. The only 6506 exception here is STRING_CST. */ 6507 if (TREE_CODE (exp) == CONSTRUCTOR 6508 || CONSTANT_CLASS_P (exp)) 6509 return XEXP (expand_expr_constant (exp, 0, modifier), 0); 6510 6511 /* Everything must be something allowed by is_gimple_addressable. */ 6512 switch (TREE_CODE (exp)) 6513 { 6514 case INDIRECT_REF: 6515 /* This case will happen via recursion for &a->b. */ 6516 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); 6517 6518 case CONST_DECL: 6519 /* Recurse and make the output_constant_def clause above handle this. */ 6520 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target, 6521 tmode, modifier); 6522 6523 case REALPART_EXPR: 6524 /* The real part of the complex number is always first, therefore 6525 the address is the same as the address of the parent object. */ 6526 offset = 0; 6527 bitpos = 0; 6528 inner = TREE_OPERAND (exp, 0); 6529 break; 6530 6531 case IMAGPART_EXPR: 6532 /* The imaginary part of the complex number is always second. 6533 The expression is therefore always offset by the size of the 6534 scalar type. */ 6535 offset = 0; 6536 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp))); 6537 inner = TREE_OPERAND (exp, 0); 6538 break; 6539 6540 default: 6541 /* If the object is a DECL, then expand it for its rtl. Don't bypass 6542 expand_expr, as that can have various side effects; LABEL_DECLs for 6543 example, may not have their DECL_RTL set yet. Assume language 6544 specific tree nodes can be expanded in some interesting way. */ 6545 if (DECL_P (exp) 6546 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE) 6547 { 6548 result = expand_expr (exp, target, tmode, 6549 modifier == EXPAND_INITIALIZER 6550 ? 
EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS); 6551 6552 /* If the DECL isn't in memory, then the DECL wasn't properly 6553 marked TREE_ADDRESSABLE, which will be either a front-end 6554 or a tree optimizer bug. */ 6555 gcc_assert (MEM_P (result)); 6556 result = XEXP (result, 0); 6557 6558 /* ??? Is this needed anymore? */ 6559 if (DECL_P (exp) && !TREE_USED (exp) == 0) 6560 { 6561 assemble_external (exp); 6562 TREE_USED (exp) = 1; 6563 } 6564 6565 if (modifier != EXPAND_INITIALIZER 6566 && modifier != EXPAND_CONST_ADDRESS) 6567 result = force_operand (result, target); 6568 return result; 6569 } 6570 6571 /* Pass FALSE as the last argument to get_inner_reference although 6572 we are expanding to RTL. The rationale is that we know how to 6573 handle "aligning nodes" here: we can just bypass them because 6574 they won't change the final object whose address will be returned 6575 (they actually exist only for that purpose). */ 6576 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, 6577 &mode1, &unsignedp, &volatilep, false); 6578 break; 6579 } 6580 6581 /* We must have made progress. */ 6582 gcc_assert (inner != exp); 6583 6584 subtarget = offset || bitpos ? NULL_RTX : target; 6585 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier); 6586 6587 if (offset) 6588 { 6589 rtx tmp; 6590 6591 if (modifier != EXPAND_NORMAL) 6592 result = force_operand (result, NULL); 6593 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL); 6594 6595 result = convert_memory_address (tmode, result); 6596 tmp = convert_memory_address (tmode, tmp); 6597 6598 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) 6599 result = gen_rtx_PLUS (tmode, result, tmp); 6600 else 6601 { 6602 subtarget = bitpos ? NULL_RTX : target; 6603 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget, 6604 1, OPTAB_LIB_WIDEN); 6605 } 6606 } 6607 6608 if (bitpos) 6609 { 6610 /* Someone beforehand should have rejected taking the address 6611 of such an object. */ 6612 gcc_assert ((bitpos % BITS_PER_UNIT) == 0); 6613 6614 result = plus_constant (result, bitpos / BITS_PER_UNIT); 6615 if (modifier < EXPAND_SUM) 6616 result = force_operand (result, target); 6617 } 6618 6619 return result; 6620} 6621 6622/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR. 6623 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */ 6624 6625static rtx 6626expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, 6627 enum expand_modifier modifier) 6628{ 6629 enum machine_mode rmode; 6630 rtx result; 6631 6632 /* Target mode of VOIDmode says "whatever's natural". */ 6633 if (tmode == VOIDmode) 6634 tmode = TYPE_MODE (TREE_TYPE (exp)); 6635 6636 /* We can get called with some Weird Things if the user does silliness 6637 like "(short) &a". In that case, convert_memory_address won't do 6638 the right thing, so ignore the given target mode. */ 6639 if (tmode != Pmode && tmode != ptr_mode) 6640 tmode = Pmode; 6641 6642 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target, 6643 tmode, modifier); 6644 6645 /* Despite expand_expr claims concerning ignoring TMODE when not 6646 strictly convenient, stuff breaks if we don't honor it. Note 6647 that combined with the above, we only do this for pointer modes. */ 6648 rmode = GET_MODE (result); 6649 if (rmode == VOIDmode) 6650 rmode = tmode; 6651 if (rmode != tmode) 6652 result = convert_memory_address (tmode, result); 6653 6654 return result; 6655} 6656 6657 6658/* expand_expr: generate code for computing expression EXP. 
6659 An rtx for the computed value is returned. The value is never null. 6660 In the case of a void EXP, const0_rtx is returned. 6661 6662 The value may be stored in TARGET if TARGET is nonzero. 6663 TARGET is just a suggestion; callers must assume that 6664 the rtx returned may not be the same as TARGET. 6665 6666 If TARGET is CONST0_RTX, it means that the value will be ignored. 6667 6668 If TMODE is not VOIDmode, it suggests generating the 6669 result in mode TMODE. But this is done only when convenient. 6670 Otherwise, TMODE is ignored and the value generated in its natural mode. 6671 TMODE is just a suggestion; callers must assume that 6672 the rtx returned may not have mode TMODE. 6673 6674 Note that TARGET may have neither TMODE nor MODE. In that case, it 6675 probably will not be used. 6676 6677 If MODIFIER is EXPAND_SUM then when EXP is an addition 6678 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...)) 6679 or a nest of (PLUS ...) and (MINUS ...) where the terms are 6680 products as above, or REG or MEM, or constant. 6681 Ordinarily in such cases we would output mul or add instructions 6682 and then return a pseudo reg containing the sum. 6683 6684 EXPAND_INITIALIZER is much like EXPAND_SUM except that 6685 it also marks a label as absolutely required (it can't be dead). 6686 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns. 6687 This is used for outputting expressions used in initializers. 6688 6689 EXPAND_CONST_ADDRESS says that it is okay to return a MEM 6690 with a constant address even if that address is not normally legitimate. 6691 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. 6692 6693 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for 6694 a call parameter. Such targets require special care as we haven't yet 6695 marked TARGET so that it's safe from being trashed by libcalls. We 6696 don't want to use TARGET for anything but the final result; 6697 Intermediate values must go elsewhere. Additionally, calls to 6698 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. 6699 6700 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid 6701 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the 6702 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a 6703 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on 6704 recursively. */ 6705 6706static rtx expand_expr_real_1 (tree, rtx, enum machine_mode, 6707 enum expand_modifier, rtx *); 6708 6709rtx 6710expand_expr_real (tree exp, rtx target, enum machine_mode tmode, 6711 enum expand_modifier modifier, rtx *alt_rtl) 6712{ 6713 int rn = -1; 6714 rtx ret, last = NULL; 6715 6716 /* Handle ERROR_MARK before anybody tries to access its type. */ 6717 if (TREE_CODE (exp) == ERROR_MARK 6718 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK) 6719 { 6720 ret = CONST0_RTX (tmode); 6721 return ret ? ret : const0_rtx; 6722 } 6723 6724 if (flag_non_call_exceptions) 6725 { 6726 rn = lookup_stmt_eh_region (exp); 6727 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */ 6728 if (rn >= 0) 6729 last = get_last_insn (); 6730 } 6731 6732 /* If this is an expression of some kind and it has an associated line 6733 number, then emit the line number before expanding the expression. 6734 6735 We need to save and restore the file and line information so that 6736 errors discovered during expansion are emitted with the right 6737 information. 
It would be better if the diagnostic routines 6738 used the file/line information embedded in the tree nodes rather 6739 than globals. */ 6740 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp)) 6741 { 6742 location_t saved_location = input_location; 6743 input_location = EXPR_LOCATION (exp); 6744 emit_line_note (input_location); 6745 6746 /* Record where the insns produced belong. */ 6747 record_block_change (TREE_BLOCK (exp)); 6748 6749 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); 6750 6751 input_location = saved_location; 6752 } 6753 else 6754 { 6755 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); 6756 } 6757 6758 /* If using non-call exceptions, mark all insns that may trap. 6759 expand_call() will mark CALL_INSNs before we get to this code, 6760 but it doesn't handle libcalls, and these may trap. */ 6761 if (rn >= 0) 6762 { 6763 rtx insn; 6764 for (insn = next_real_insn (last); insn; 6765 insn = next_real_insn (insn)) 6766 { 6767 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) 6768 /* If we want exceptions for non-call insns, any 6769 may_trap_p instruction may throw. */ 6770 && GET_CODE (PATTERN (insn)) != CLOBBER 6771 && GET_CODE (PATTERN (insn)) != USE 6772 && (CALL_P (insn) || may_trap_p (PATTERN (insn)))) 6773 { 6774 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn), 6775 REG_NOTES (insn)); 6776 } 6777 } 6778 } 6779 6780 return ret; 6781} 6782 6783static rtx 6784expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, 6785 enum expand_modifier modifier, rtx *alt_rtl) 6786{ 6787 rtx op0, op1, temp, decl_rtl; 6788 tree type = TREE_TYPE (exp); 6789 int unsignedp; 6790 enum machine_mode mode; 6791 enum tree_code code = TREE_CODE (exp); 6792 optab this_optab; 6793 rtx subtarget, original_target; 6794 int ignore; 6795 tree context, subexp0, subexp1; 6796 bool reduce_bit_field = false; 6797#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \ 6798 ? reduce_to_bit_field_precision ((expr), \ 6799 target, \ 6800 type) \ 6801 : (expr)) 6802 6803 mode = TYPE_MODE (type); 6804 unsignedp = TYPE_UNSIGNED (type); 6805 if (lang_hooks.reduce_bit_field_operations 6806 && TREE_CODE (type) == INTEGER_TYPE 6807 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type)) 6808 { 6809 /* An operation in what may be a bit-field type needs the 6810 result to be reduced to the precision of the bit-field type, 6811 which is narrower than that of the type's mode. */ 6812 reduce_bit_field = true; 6813 if (modifier == EXPAND_STACK_PARM) 6814 target = 0; 6815 } 6816 6817 /* Use subtarget as the target for operand 0 of a binary operation. */ 6818 subtarget = get_subtarget (target); 6819 original_target = target; 6820 ignore = (target == const0_rtx 6821 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR 6822 || code == CONVERT_EXPR || code == COND_EXPR 6823 || code == VIEW_CONVERT_EXPR) 6824 && TREE_CODE (type) == VOID_TYPE)); 6825 6826 /* If we are going to ignore this result, we need only do something 6827 if there is a side-effect somewhere in the expression. If there 6828 is, short-circuit the most common cases here. Note that we must 6829 not call expand_expr with anything but const0_rtx in case this 6830 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */ 6831 6832 if (ignore) 6833 { 6834 if (! TREE_SIDE_EFFECTS (exp)) 6835 return const0_rtx; 6836 6837 /* Ensure we reference a volatile object even if value is ignored, but 6838 don't do this if all we are doing is taking its address.
*/ 6839 if (TREE_THIS_VOLATILE (exp) 6840 && TREE_CODE (exp) != FUNCTION_DECL 6841 && mode != VOIDmode && mode != BLKmode 6842 && modifier != EXPAND_CONST_ADDRESS) 6843 { 6844 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier); 6845 if (MEM_P (temp)) 6846 temp = copy_to_reg (temp); 6847 return const0_rtx; 6848 } 6849 6850 if (TREE_CODE_CLASS (code) == tcc_unary 6851 || code == COMPONENT_REF || code == INDIRECT_REF) 6852 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 6853 modifier); 6854 6855 else if (TREE_CODE_CLASS (code) == tcc_binary 6856 || TREE_CODE_CLASS (code) == tcc_comparison 6857 || code == ARRAY_REF || code == ARRAY_RANGE_REF) 6858 { 6859 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); 6860 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); 6861 return const0_rtx; 6862 } 6863 else if (code == BIT_FIELD_REF) 6864 { 6865 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); 6866 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); 6867 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier); 6868 return const0_rtx; 6869 } 6870 6871 target = 0; 6872 } 6873 6874 6875 switch (code) 6876 { 6877 case LABEL_DECL: 6878 { 6879 tree function = decl_function_context (exp); 6880 6881 temp = label_rtx (exp); 6882 temp = gen_rtx_LABEL_REF (Pmode, temp); 6883 6884 if (function != current_function_decl 6885 && function != 0) 6886 LABEL_REF_NONLOCAL_P (temp) = 1; 6887 6888 temp = gen_rtx_MEM (FUNCTION_MODE, temp); 6889 return temp; 6890 } 6891 6892 case SSA_NAME: 6893 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, 6894 NULL); 6895 6896 case PARM_DECL: 6897 case VAR_DECL: 6898 /* If a static var's type was incomplete when the decl was written, 6899 but the type is complete now, lay out the decl now. */ 6900 if (DECL_SIZE (exp) == 0 6901 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp)) 6902 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp))) 6903 layout_decl (exp, 0); 6904 6905 /* ... fall through ... */ 6906 6907 case FUNCTION_DECL: 6908 case RESULT_DECL: 6909 decl_rtl = DECL_RTL (exp); 6910 gcc_assert (decl_rtl); 6911 6912 /* Ensure the variable is marked as used even if it doesn't go through 6913 a parser. If it hasn't been used yet, write out an external 6914 definition. */ 6915 if (! TREE_USED (exp)) 6916 { 6917 assemble_external (exp); 6918 TREE_USED (exp) = 1; 6919 } 6920 6921 /* Show we haven't gotten RTL for this yet. */ 6922 temp = 0; 6923 6924 /* Variables inherited from containing functions should have 6925 been lowered by this point. */ 6926 context = decl_function_context (exp); 6927 gcc_assert (!context 6928 || context == current_function_decl 6929 || TREE_STATIC (exp) 6930 /* ??? C++ creates functions that are not TREE_STATIC. */ 6931 || TREE_CODE (exp) == FUNCTION_DECL); 6932 6933 /* This is the case of an array whose size is to be determined 6934 from its initializer, while the initializer is still being parsed. 6935 See expand_decl. */ 6936 6937 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0))) 6938 temp = validize_mem (decl_rtl); 6939 6940 /* If DECL_RTL is memory, we are in the normal case. If the 6941 address is not valid, or it is not a register and -fforce-addr 6942 is specified, get the address into a register.
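 (For instance, a file-scope variable commonly has DECL_RTL of the form (mem (symbol_ref "x")), with "x" a stand-in name; on targets where a bare symbol_ref is not a legitimate address, memory_address_p fails and the address is copied into a register through replace_equiv_address below.)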
*/ 6943 6944 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER) 6945 { 6946 if (alt_rtl) 6947 *alt_rtl = decl_rtl; 6948 decl_rtl = use_anchored_address (decl_rtl); 6949 if (modifier != EXPAND_CONST_ADDRESS 6950 && modifier != EXPAND_SUM 6951 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)) 6952 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0))))) 6953 temp = replace_equiv_address (decl_rtl, 6954 copy_rtx (XEXP (decl_rtl, 0))); 6955 } 6956 6957 /* If we got something, return it. But first, set the alignment 6958 if the address is a register. */ 6959 if (temp != 0) 6960 { 6961 if (MEM_P (temp) && REG_P (XEXP (temp, 0))) 6962 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp)); 6963 6964 return temp; 6965 } 6966 6967 /* If the mode of DECL_RTL does not match that of the decl, it 6968 must be a promoted value. We return a SUBREG of the wanted mode, 6969 but mark it so that we know that it was already extended. */ 6970 6971 if (REG_P (decl_rtl) 6972 && GET_MODE (decl_rtl) != DECL_MODE (exp)) 6973 { 6974 enum machine_mode pmode; 6975 6976 /* Get the signedness used for this variable. Ensure we get the 6977 same mode we got when the variable was declared. */ 6978 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp, 6979 (TREE_CODE (exp) == RESULT_DECL 6980 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0); 6981 gcc_assert (GET_MODE (decl_rtl) == pmode); 6982 6983 temp = gen_lowpart_SUBREG (mode, decl_rtl); 6984 SUBREG_PROMOTED_VAR_P (temp) = 1; 6985 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); 6986 return temp; 6987 } 6988 6989 return decl_rtl; 6990 6991 case INTEGER_CST: 6992 temp = immed_double_const (TREE_INT_CST_LOW (exp), 6993 TREE_INT_CST_HIGH (exp), mode); 6994 6995 /* ??? If overflow is set, fold will have done an incomplete job, 6996 which can result in (plus xx (const_int 0)), which can get 6997 simplified by validate_replace_rtx during virtual register 6998 instantiation, which can result in unrecognizable insns. 6999 Avoid this by forcing all overflows into registers. */ 7000 if (TREE_CONSTANT_OVERFLOW (exp) 7001 && modifier != EXPAND_INITIALIZER) 7002 temp = force_reg (mode, temp); 7003 7004 return temp; 7005 7006 case VECTOR_CST: 7007 { 7008 tree tmp = NULL_TREE; 7009 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT 7010 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT) 7011 return const_vector_from_tree (exp); 7012 if (GET_MODE_CLASS (mode) == MODE_INT) 7013 { 7014 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1); 7015 if (type_for_mode) 7016 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp); 7017 } 7018 if (!tmp) 7019 tmp = build_constructor_from_list (type, 7020 TREE_VECTOR_CST_ELTS (exp)); 7021 return expand_expr (tmp, ignore ? const0_rtx : target, 7022 tmode, modifier); 7023 } 7024 7025 case CONST_DECL: 7026 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier); 7027 7028 case REAL_CST: 7029 /* If optimized, generate immediate CONST_DOUBLE 7030 which will be turned into memory by reload if necessary. 7031 7032 We used to force a register so that loop.c could see it. But 7033 this does not allow gen_* patterns to perform optimizations with 7034 the constants. It also produces two insns in cases like "x = 1.0;". 7035 On most machines, floating-point constants are not permitted in 7036 many insns, so we'd end up copying it to a register in any case. 7037 7038 Now, we do the copying in expand_binop, if appropriate. 
*/ 7039 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp), 7040 TYPE_MODE (TREE_TYPE (exp))); 7041 7042 case COMPLEX_CST: 7043 /* Handle evaluating a complex constant in a CONCAT target. */ 7044 if (original_target && GET_CODE (original_target) == CONCAT) 7045 { 7046 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); 7047 rtx rtarg, itarg; 7048 7049 rtarg = XEXP (original_target, 0); 7050 itarg = XEXP (original_target, 1); 7051 7052 /* Move the real and imaginary parts separately. */ 7053 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0); 7054 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0); 7055 7056 if (op0 != rtarg) 7057 emit_move_insn (rtarg, op0); 7058 if (op1 != itarg) 7059 emit_move_insn (itarg, op1); 7060 7061 return original_target; 7062 } 7063 7064 /* ... fall through ... */ 7065 7066 case STRING_CST: 7067 temp = expand_expr_constant (exp, 1, modifier); 7068 7069 /* temp contains a constant address. 7070 On RISC machines where a constant address isn't valid, 7071 make some insns to get that address into a register. */ 7072 if (modifier != EXPAND_CONST_ADDRESS 7073 && modifier != EXPAND_INITIALIZER 7074 && modifier != EXPAND_SUM 7075 && (! memory_address_p (mode, XEXP (temp, 0)) 7076 || flag_force_addr)) 7077 return replace_equiv_address (temp, 7078 copy_rtx (XEXP (temp, 0))); 7079 return temp; 7080 7081 case SAVE_EXPR: 7082 { 7083 tree val = TREE_OPERAND (exp, 0); 7084 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl); 7085 7086 if (!SAVE_EXPR_RESOLVED_P (exp)) 7087 { 7088 /* We can indeed still hit this case, typically via builtin 7089 expanders calling save_expr immediately before expanding 7090 something. Assume this means that we only have to deal 7091 with non-BLKmode values. */ 7092 gcc_assert (GET_MODE (ret) != BLKmode); 7093 7094 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp)); 7095 DECL_ARTIFICIAL (val) = 1; 7096 DECL_IGNORED_P (val) = 1; 7097 TREE_OPERAND (exp, 0) = val; 7098 SAVE_EXPR_RESOLVED_P (exp) = 1; 7099 7100 if (!CONSTANT_P (ret)) 7101 ret = copy_to_reg (ret); 7102 SET_DECL_RTL (val, ret); 7103 } 7104 7105 return ret; 7106 } 7107 7108 case GOTO_EXPR: 7109 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL) 7110 expand_goto (TREE_OPERAND (exp, 0)); 7111 else 7112 expand_computed_goto (TREE_OPERAND (exp, 0)); 7113 return const0_rtx; 7114 7115 case CONSTRUCTOR: 7116 /* If we don't need the result, just ensure we evaluate any 7117 subexpressions. */ 7118 if (ignore) 7119 { 7120 unsigned HOST_WIDE_INT idx; 7121 tree value; 7122 7123 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value) 7124 expand_expr (value, const0_rtx, VOIDmode, 0); 7125 7126 return const0_rtx; 7127 } 7128 7129 /* Try to avoid creating a temporary at all. This is possible 7130 if all of the initializer is zero. 7131 FIXME: try to handle all [0..255] initializers we can handle 7132 with memset. */ 7133 else if (TREE_STATIC (exp) 7134 && !TREE_ADDRESSABLE (exp) 7135 && target != 0 && mode == BLKmode 7136 && all_zeros_p (exp)) 7137 { 7138 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL); 7139 return target; 7140 } 7141 7142 /* All elts simple constants => refer to a constant in memory. But 7143 if this is a non-BLKmode mode, let it store a field at a time 7144 since that should make a CONST_INT or CONST_DOUBLE when we 7145 fold. Likewise, if we have a target we can use, it is best to 7146 store directly into the target unless the type is large enough 7147 that memcpy will be used. 
If we are making an initializer and 7148 all operands are constant, put it in memory as well. 7149 7150 FIXME: Avoid trying to fill vector constructors piece-meal. 7151 Output them with output_constant_def below unless we're sure 7152 they're zeros. This should go away when vector initializers 7153 are treated like VECTOR_CST instead of arrays. 7154 */ 7155 else if ((TREE_STATIC (exp) 7156 && ((mode == BLKmode 7157 && ! (target != 0 && safe_from_p (target, exp, 1))) 7158 || TREE_ADDRESSABLE (exp) 7159 || (host_integerp (TYPE_SIZE_UNIT (type), 1) 7160 && (! MOVE_BY_PIECES_P 7161 (tree_low_cst (TYPE_SIZE_UNIT (type), 1), 7162 TYPE_ALIGN (type))) 7163 && ! mostly_zeros_p (exp)))) 7164 || ((modifier == EXPAND_INITIALIZER 7165 || modifier == EXPAND_CONST_ADDRESS) 7166 && TREE_CONSTANT (exp))) 7167 { 7168 rtx constructor = expand_expr_constant (exp, 1, modifier); 7169 7170 if (modifier != EXPAND_CONST_ADDRESS 7171 && modifier != EXPAND_INITIALIZER 7172 && modifier != EXPAND_SUM) 7173 constructor = validize_mem (constructor); 7174 7175 return constructor; 7176 } 7177 else 7178 { 7179 /* Handle calls that pass values in multiple non-contiguous 7180 locations. The Irix 6 ABI has examples of this. */ 7181 if (target == 0 || ! safe_from_p (target, exp, 1) 7182 || GET_CODE (target) == PARALLEL 7183 || modifier == EXPAND_STACK_PARM) 7184 target 7185 = assign_temp (build_qualified_type (type, 7186 (TYPE_QUALS (type) 7187 | (TREE_READONLY (exp) 7188 * TYPE_QUAL_CONST))), 7189 0, TREE_ADDRESSABLE (exp), 1); 7190 7191 store_constructor (exp, target, 0, int_expr_size (exp)); 7192 return target; 7193 } 7194 7195 case MISALIGNED_INDIRECT_REF: 7196 case ALIGN_INDIRECT_REF: 7197 case INDIRECT_REF: 7198 { 7199 tree exp1 = TREE_OPERAND (exp, 0); 7200 7201 if (modifier != EXPAND_WRITE) 7202 { 7203 tree t; 7204 7205 t = fold_read_from_constant_string (exp); 7206 if (t) 7207 return expand_expr (t, target, tmode, modifier); 7208 } 7209 7210 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM); 7211 op0 = memory_address (mode, op0); 7212 7213 if (code == ALIGN_INDIRECT_REF) 7214 { 7215 int align = TYPE_ALIGN_UNIT (type); 7216 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align)); 7217 op0 = memory_address (mode, op0); 7218 } 7219 7220 temp = gen_rtx_MEM (mode, op0); 7221 7222 set_mem_attributes (temp, exp, 0); 7223 7224 /* Resolve the misalignment now, so that we don't have to remember 7225 to resolve it later. Of course, this only works for reads. */ 7226 /* ??? When we get around to supporting writes, we'll have to handle 7227 this in store_expr directly. The vectorizer isn't generating 7228 those yet, however. */ 7229 if (code == MISALIGNED_INDIRECT_REF) 7230 { 7231 int icode; 7232 rtx reg, insn; 7233 7234 gcc_assert (modifier == EXPAND_NORMAL 7235 || modifier == EXPAND_STACK_PARM); 7236 7237 /* The vectorizer should have already checked the mode. */ 7238 icode = movmisalign_optab->handlers[mode].insn_code; 7239 gcc_assert (icode != CODE_FOR_nothing); 7240 7241 /* We've already validated the memory, and we're creating a 7242 new pseudo destination. The predicates really can't fail. */ 7243 reg = gen_reg_rtx (mode); 7244 7245 /* Nor can the insn generator. 
*/ 7246 insn = GEN_FCN (icode) (reg, temp); 7247 emit_insn (insn); 7248 7249 return reg; 7250 } 7251 7252 return temp; 7253 } 7254 7255 case TARGET_MEM_REF: 7256 { 7257 struct mem_address addr; 7258 7259 get_address_description (exp, &addr); 7260 op0 = addr_for_mem_ref (&addr, true); 7261 op0 = memory_address (mode, op0); 7262 temp = gen_rtx_MEM (mode, op0); 7263 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0); 7264 } 7265 return temp; 7266 7267 case ARRAY_REF: 7268 7269 { 7270 tree array = TREE_OPERAND (exp, 0); 7271 tree index = TREE_OPERAND (exp, 1); 7272 7273 /* Fold an expression like: "foo"[2]. 7274 This is not done in fold so it won't happen inside &. 7275 Don't fold if this is for wide characters since it's too 7276 difficult to do correctly and this is a very rare case. */ 7277 7278 if (modifier != EXPAND_CONST_ADDRESS 7279 && modifier != EXPAND_INITIALIZER 7280 && modifier != EXPAND_MEMORY) 7281 { 7282 tree t = fold_read_from_constant_string (exp); 7283 7284 if (t) 7285 return expand_expr (t, target, tmode, modifier); 7286 } 7287 7288 /* If this is a constant index into a constant array, 7289 just get the value from the array. Handle both the cases when 7290 we have an explicit constructor and when our operand is a variable 7291 that was declared const. */ 7292 7293 if (modifier != EXPAND_CONST_ADDRESS 7294 && modifier != EXPAND_INITIALIZER 7295 && modifier != EXPAND_MEMORY 7296 && TREE_CODE (array) == CONSTRUCTOR 7297 && ! TREE_SIDE_EFFECTS (array) 7298 && TREE_CODE (index) == INTEGER_CST) 7299 { 7300 unsigned HOST_WIDE_INT ix; 7301 tree field, value; 7302 7303 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix, 7304 field, value) 7305 if (tree_int_cst_equal (field, index)) 7306 { 7307 if (!TREE_SIDE_EFFECTS (value)) 7308 return expand_expr (fold (value), target, tmode, modifier); 7309 break; 7310 } 7311 } 7312 7313 else if (optimize >= 1 7314 && modifier != EXPAND_CONST_ADDRESS 7315 && modifier != EXPAND_INITIALIZER 7316 && modifier != EXPAND_MEMORY 7317 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array) 7318 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array) 7319 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK 7320 && targetm.binds_local_p (array)) 7321 { 7322 if (TREE_CODE (index) == INTEGER_CST) 7323 { 7324 tree init = DECL_INITIAL (array); 7325 7326 if (TREE_CODE (init) == CONSTRUCTOR) 7327 { 7328 unsigned HOST_WIDE_INT ix; 7329 tree field, value; 7330 7331 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix, 7332 field, value) 7333 if (tree_int_cst_equal (field, index)) 7334 { 7335 if (!TREE_SIDE_EFFECTS (value)) 7336 return expand_expr (fold (value), target, tmode, 7337 modifier); 7338 break; 7339 } 7340 } 7341 else if (TREE_CODE (init) == STRING_CST) 7342 { 7343 tree index1 = index; 7344 tree low_bound = array_ref_low_bound (exp); 7345 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1)); 7346 7347 /* Optimize the special case of a zero lower bound. 7348 7349 We convert the low_bound to sizetype to avoid some problems 7350 with constant folding. (E.g. suppose the lower bound is 1, 7351 and its mode is QI. Without the conversion, (ARRAY 7352 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) 7353 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ 7354 7355 if (!
integer_zerop (low_bound)) 7356 index1 = size_diffop (index1, fold_convert (sizetype, 7357 low_bound)); 7358 7359 if (0 > compare_tree_int (index1, 7360 TREE_STRING_LENGTH (init))) 7361 { 7362 tree type = TREE_TYPE (TREE_TYPE (init)); 7363 enum machine_mode mode = TYPE_MODE (type); 7364 7365 if (GET_MODE_CLASS (mode) == MODE_INT 7366 && GET_MODE_SIZE (mode) == 1) 7367 return gen_int_mode (TREE_STRING_POINTER (init) 7368 [TREE_INT_CST_LOW (index1)], 7369 mode); 7370 } 7371 } 7372 } 7373 } 7374 } 7375 goto normal_inner_ref; 7376 7377 case COMPONENT_REF: 7378 /* If the operand is a CONSTRUCTOR, we can just extract the 7379 appropriate field if it is present. */ 7380 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR) 7381 { 7382 unsigned HOST_WIDE_INT idx; 7383 tree field, value; 7384 7385 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), 7386 idx, field, value) 7387 if (field == TREE_OPERAND (exp, 1) 7388 /* We can normally use the value of the field in the 7389 CONSTRUCTOR. However, if this is a bitfield in 7390 an integral mode that we can fit in a HOST_WIDE_INT, 7391 we must mask only the number of bits in the bitfield, 7392 since this is done implicitly by the constructor. If 7393 the bitfield does not meet either of those conditions, 7394 we can't do this optimization. */ 7395 && (! DECL_BIT_FIELD (field) 7396 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT) 7397 && (GET_MODE_BITSIZE (DECL_MODE (field)) 7398 <= HOST_BITS_PER_WIDE_INT)))) 7399 { 7400 if (DECL_BIT_FIELD (field) 7401 && modifier == EXPAND_STACK_PARM) 7402 target = 0; 7403 op0 = expand_expr (value, target, tmode, modifier); 7404 if (DECL_BIT_FIELD (field)) 7405 { 7406 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)); 7407 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field)); 7408 7409 if (TYPE_UNSIGNED (TREE_TYPE (field))) 7410 { 7411 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1); 7412 op0 = expand_and (imode, op0, op1, target); 7413 } 7414 else 7415 { 7416 tree count 7417 = build_int_cst (NULL_TREE, 7418 GET_MODE_BITSIZE (imode) - bitsize); 7419 7420 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count, 7421 target, 0); 7422 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count, 7423 target, 0); 7424 } 7425 } 7426 7427 return op0; 7428 } 7429 } 7430 goto normal_inner_ref; 7431 7432 case BIT_FIELD_REF: 7433 case ARRAY_RANGE_REF: 7434 normal_inner_ref: 7435 { 7436 enum machine_mode mode1; 7437 HOST_WIDE_INT bitsize, bitpos; 7438 tree offset; 7439 int volatilep = 0; 7440 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, 7441 &mode1, &unsignedp, &volatilep, true); 7442 rtx orig_op0; 7443 7444 /* If we got back the original object, something is wrong. Perhaps 7445 we are evaluating an expression too early. In any event, don't 7446 infinitely recurse. */ 7447 gcc_assert (tem != exp); 7448 7449 /* If TEM's type is a union of variable size, pass TARGET to the inner 7450 computation, since it will need a temporary and TARGET is known 7451 to have to do. This occurs in unchecked conversion in Ada. */ 7452 7453 orig_op0 = op0 7454 = expand_expr (tem, 7455 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE 7456 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) 7457 != INTEGER_CST) 7458 && modifier != EXPAND_STACK_PARM 7459 ? target : NULL_RTX), 7460 VOIDmode, 7461 (modifier == EXPAND_INITIALIZER 7462 || modifier == EXPAND_CONST_ADDRESS 7463 || modifier == EXPAND_STACK_PARM) 7464 ? 
modifier : EXPAND_NORMAL); 7465 7466 /* If this is a constant, put it into a register if it is a legitimate 7467 constant, OFFSET is 0, and we won't try to extract outside the 7468 register (in case we were passed a partially uninitialized object 7469 or a view_conversion to a larger size). Force the constant to 7470 memory otherwise. */ 7471 if (CONSTANT_P (op0)) 7472 { 7473 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem)); 7474 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0) 7475 && offset == 0 7476 && bitpos + bitsize <= GET_MODE_BITSIZE (mode)) 7477 op0 = force_reg (mode, op0); 7478 else 7479 op0 = validize_mem (force_const_mem (mode, op0)); 7480 } 7481 7482 /* Otherwise, if this object not in memory and we either have an 7483 offset, a BLKmode result, or a reference outside the object, put it 7484 there. Such cases can occur in Ada if we have unchecked conversion 7485 of an expression from a scalar type to an array or record type or 7486 for an ARRAY_RANGE_REF whose type is BLKmode. */ 7487 else if (!MEM_P (op0) 7488 && (offset != 0 7489 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0))) 7490 || (code == ARRAY_RANGE_REF && mode == BLKmode))) 7491 { 7492 tree nt = build_qualified_type (TREE_TYPE (tem), 7493 (TYPE_QUALS (TREE_TYPE (tem)) 7494 | TYPE_QUAL_CONST)); 7495 rtx memloc = assign_temp (nt, 1, 1, 1); 7496 7497 emit_move_insn (memloc, op0); 7498 op0 = memloc; 7499 } 7500 7501 if (offset != 0) 7502 { 7503 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 7504 EXPAND_SUM); 7505 7506 gcc_assert (MEM_P (op0)); 7507 7508#ifdef POINTERS_EXTEND_UNSIGNED 7509 if (GET_MODE (offset_rtx) != Pmode) 7510 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); 7511#else 7512 if (GET_MODE (offset_rtx) != ptr_mode) 7513 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); 7514#endif 7515 7516 if (GET_MODE (op0) == BLKmode 7517 /* A constant address in OP0 can have VOIDmode, we must 7518 not try to call force_reg in that case. */ 7519 && GET_MODE (XEXP (op0, 0)) != VOIDmode 7520 && bitsize != 0 7521 && (bitpos % bitsize) == 0 7522 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 7523 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1)) 7524 { 7525 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); 7526 bitpos = 0; 7527 } 7528 7529 op0 = offset_address (op0, offset_rtx, 7530 highest_pow2_factor (offset)); 7531 } 7532 7533 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT, 7534 record its alignment as BIGGEST_ALIGNMENT. */ 7535 if (MEM_P (op0) && bitpos == 0 && offset != 0 7536 && is_aligning_offset (offset, tem)) 7537 set_mem_align (op0, BIGGEST_ALIGNMENT); 7538 7539 /* Don't forget about volatility even if this is a bitfield. */ 7540 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0)) 7541 { 7542 if (op0 == orig_op0) 7543 op0 = copy_rtx (op0); 7544 7545 MEM_VOLATILE_P (op0) = 1; 7546 } 7547 7548 /* The following code doesn't handle CONCAT. 7549 Assume only bitpos == 0 can be used for CONCAT, due to 7550 one element arrays having the same mode as its element. */ 7551 if (GET_CODE (op0) == CONCAT) 7552 { 7553 gcc_assert (bitpos == 0 7554 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))); 7555 return op0; 7556 } 7557 7558 /* In cases where an aligned union has an unaligned object 7559 as a field, we might be extracting a BLKmode value from 7560 an integer-mode (e.g., SImode) object. 
Handle this case 7561 by doing the extract into an object as wide as the field 7562 (which we know to be the width of a basic mode), then 7563 storing into memory, and changing the mode to BLKmode. */ 7564 if (mode1 == VOIDmode 7565 || REG_P (op0) || GET_CODE (op0) == SUBREG 7566 || (mode1 != BLKmode && ! direct_load[(int) mode1] 7567 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT 7568 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT 7569 && modifier != EXPAND_CONST_ADDRESS 7570 && modifier != EXPAND_INITIALIZER) 7571 /* If the field isn't aligned enough to fetch as a memref, 7572 fetch it as a bit field. */ 7573 || (mode1 != BLKmode 7574 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode) 7575 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0) 7576 || (MEM_P (op0) 7577 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1) 7578 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)))) 7579 && ((modifier == EXPAND_CONST_ADDRESS 7580 || modifier == EXPAND_INITIALIZER) 7581 ? STRICT_ALIGNMENT 7582 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))) 7583 || (bitpos % BITS_PER_UNIT != 0))) 7584 /* If the type and the field are a constant size and the 7585 size of the type isn't the same size as the bitfield, 7586 we must use bitfield operations. */ 7587 || (bitsize >= 0 7588 && TYPE_SIZE (TREE_TYPE (exp)) 7589 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST 7590 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), 7591 bitsize))) 7592 { 7593 enum machine_mode ext_mode = mode; 7594 7595 if (ext_mode == BLKmode 7596 && ! (target != 0 && MEM_P (op0) 7597 && MEM_P (target) 7598 && bitpos % BITS_PER_UNIT == 0)) 7599 ext_mode = mode_for_size (bitsize, MODE_INT, 1); 7600 7601 if (ext_mode == BLKmode) 7602 { 7603 if (target == 0) 7604 target = assign_temp (type, 0, 1, 1); 7605 7606 if (bitsize == 0) 7607 return target; 7608 7609 /* In this case, BITPOS must start at a byte boundary and 7610 TARGET, if specified, must be a MEM. */ 7611 gcc_assert (MEM_P (op0) 7612 && (!target || MEM_P (target)) 7613 && !(bitpos % BITS_PER_UNIT)); 7614 7615 emit_block_move (target, 7616 adjust_address (op0, VOIDmode, 7617 bitpos / BITS_PER_UNIT), 7618 GEN_INT ((bitsize + BITS_PER_UNIT - 1) 7619 / BITS_PER_UNIT), 7620 (modifier == EXPAND_STACK_PARM 7621 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); 7622 7623 return target; 7624 } 7625 7626 op0 = validize_mem (op0); 7627 7628 if (MEM_P (op0) && REG_P (XEXP (op0, 0))) 7629 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); 7630 7631 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, 7632 (modifier == EXPAND_STACK_PARM 7633 ? NULL_RTX : target), 7634 ext_mode, ext_mode); 7635 7636 /* If the result is a record type and BITSIZE is narrower than 7637 the mode of OP0, an integral mode, and this is a big endian 7638 machine, we must put the field into the high-order bits. */ 7639 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN 7640 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT 7641 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0))) 7642 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0, 7643 size_int (GET_MODE_BITSIZE (GET_MODE (op0)) 7644 - bitsize), 7645 op0, 1); 7646 7647 /* If the result type is BLKmode, store the data into a temporary 7648 of the appropriate type, but with the mode corresponding to the 7649 mode for the data we have (op0's mode). 
It's tempting to make 7650 this a constant type, since we know it's only being stored once, 7651 but that can cause problems if we are taking the address of this 7652 COMPONENT_REF because the MEM of any reference via that address 7653 will have flags corresponding to the type, which will not 7654 necessarily be constant. */ 7655 if (mode == BLKmode) 7656 { 7657 rtx new 7658 = assign_stack_temp_for_type 7659 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type); 7660 7661 emit_move_insn (new, op0); 7662 op0 = copy_rtx (new); 7663 PUT_MODE (op0, BLKmode); 7664 set_mem_attributes (op0, exp, 1); 7665 } 7666 7667 return op0; 7668 } 7669 7670 /* If the result is BLKmode, use that to access the object 7671 now as well. */ 7672 if (mode == BLKmode) 7673 mode1 = BLKmode; 7674 7675 /* Get a reference to just this component. */ 7676 if (modifier == EXPAND_CONST_ADDRESS 7677 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) 7678 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); 7679 else 7680 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); 7681 7682 if (op0 == orig_op0) 7683 op0 = copy_rtx (op0); 7684 7685 set_mem_attributes (op0, exp, 0); 7686 if (REG_P (XEXP (op0, 0))) 7687 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); 7688 7689 MEM_VOLATILE_P (op0) |= volatilep; 7690 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode 7691 || modifier == EXPAND_CONST_ADDRESS 7692 || modifier == EXPAND_INITIALIZER) 7693 return op0; 7694 else if (target == 0) 7695 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); 7696 7697 convert_move (target, op0, unsignedp); 7698 return target; 7699 } 7700 7701 case OBJ_TYPE_REF: 7702 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier); 7703 7704 case CALL_EXPR: 7705 /* Check for a built-in function. */ 7706 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR 7707 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) 7708 == FUNCTION_DECL) 7709 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) 7710 { 7711 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) 7712 == BUILT_IN_FRONTEND) 7713 return lang_hooks.expand_expr (exp, original_target, 7714 tmode, modifier, 7715 alt_rtl); 7716 else 7717 return expand_builtin (exp, target, subtarget, tmode, ignore); 7718 } 7719 7720 return expand_call (exp, target, ignore); 7721 7722 case NON_LVALUE_EXPR: 7723 case NOP_EXPR: 7724 case CONVERT_EXPR: 7725 if (TREE_OPERAND (exp, 0) == error_mark_node) 7726 return const0_rtx; 7727 7728 if (TREE_CODE (type) == UNION_TYPE) 7729 { 7730 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0)); 7731 7732 /* If both input and output are BLKmode, this conversion isn't doing 7733 anything except possibly changing memory attribute. */ 7734 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode) 7735 { 7736 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode, 7737 modifier); 7738 7739 result = copy_rtx (result); 7740 set_mem_attributes (result, exp, 0); 7741 return result; 7742 } 7743 7744 if (target == 0) 7745 { 7746 if (TYPE_MODE (type) != BLKmode) 7747 target = gen_reg_rtx (TYPE_MODE (type)); 7748 else 7749 target = assign_temp (type, 0, 1, 1); 7750 } 7751 7752 if (MEM_P (target)) 7753 /* Store data into beginning of memory target. */ 7754 store_expr (TREE_OPERAND (exp, 0), 7755 adjust_address (target, TYPE_MODE (valtype), 0), 7756 modifier == EXPAND_STACK_PARM); 7757 7758 else 7759 { 7760 gcc_assert (REG_P (target)); 7761 7762 /* Store this field into a union of the proper type. 
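   For instance (an editorial sketch of the GNU cast-to-union
   extension, not part of the original comment):

     union u { int i; double d; };
     ... (union u) 42 ...

   converts an int to union u; the int is stored into the first
   bytes of the union object, and the whole union is the result.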
*/ 7763 store_field (target, 7764 MIN ((int_size_in_bytes (TREE_TYPE 7765 (TREE_OPERAND (exp, 0))) 7766 * BITS_PER_UNIT), 7767 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), 7768 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), 7769 type, 0); 7770 } 7771 7772 /* Return the entire union. */ 7773 return target; 7774 } 7775 7776 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) 7777 { 7778 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 7779 modifier); 7780 7781 /* If the signedness of the conversion differs and OP0 is 7782 a promoted SUBREG, clear that indication since we now 7783 have to do the proper extension. */ 7784 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp 7785 && GET_CODE (op0) == SUBREG) 7786 SUBREG_PROMOTED_VAR_P (op0) = 0; 7787 7788 return REDUCE_BIT_FIELD (op0); 7789 } 7790 7791 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 7792 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier); 7793 if (GET_MODE (op0) == mode) 7794 ; 7795 7796 /* If OP0 is a constant, just convert it into the proper mode. */ 7797 else if (CONSTANT_P (op0)) 7798 { 7799 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 7800 enum machine_mode inner_mode = TYPE_MODE (inner_type); 7801 7802 if (modifier == EXPAND_INITIALIZER) 7803 op0 = simplify_gen_subreg (mode, op0, inner_mode, 7804 subreg_lowpart_offset (mode, 7805 inner_mode)); 7806 else 7807 op0= convert_modes (mode, inner_mode, op0, 7808 TYPE_UNSIGNED (inner_type)); 7809 } 7810 7811 else if (modifier == EXPAND_INITIALIZER) 7812 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0); 7813 7814 else if (target == 0) 7815 op0 = convert_to_mode (mode, op0, 7816 TYPE_UNSIGNED (TREE_TYPE 7817 (TREE_OPERAND (exp, 0)))); 7818 else 7819 { 7820 convert_move (target, op0, 7821 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); 7822 op0 = target; 7823 } 7824 7825 return REDUCE_BIT_FIELD (op0); 7826 7827 case VIEW_CONVERT_EXPR: 7828 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier); 7829 7830 /* If the input and output modes are both the same, we are done. */ 7831 if (TYPE_MODE (type) == GET_MODE (op0)) 7832 ; 7833 /* If neither mode is BLKmode, and both modes are the same size 7834 then we can use gen_lowpart. */ 7835 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode 7836 && GET_MODE_SIZE (TYPE_MODE (type)) 7837 == GET_MODE_SIZE (GET_MODE (op0))) 7838 { 7839 if (GET_CODE (op0) == SUBREG) 7840 op0 = force_reg (GET_MODE (op0), op0); 7841 op0 = gen_lowpart (TYPE_MODE (type), op0); 7842 } 7843 /* If both modes are integral, then we can convert from one to the 7844 other. */ 7845 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) 7846 && SCALAR_INT_MODE_P (TYPE_MODE (type))) 7847 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0, 7848 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); 7849 /* As a last resort, spill op0 to memory, and reload it in a 7850 different mode. */ 7851 else if (!MEM_P (op0)) 7852 { 7853 /* If the operand is not a MEM, force it into memory. Since we 7854 are going to be changing the mode of the MEM, don't call 7855 force_const_mem for constants because we don't allow pool 7856 constants to change mode. 
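   (Editor's example: type-punning code such as

     double d = ...;
     long long l = VIEW_CONVERT_EXPR <long long> (d);

   when D currently lives in a floating-point register; we spill D
   to a stack slot in its own mode and re-read that slot in the new
   mode below.)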
*/ 7857 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 7858 7859 gcc_assert (!TREE_ADDRESSABLE (exp)); 7860 7861 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type)) 7862 target 7863 = assign_stack_temp_for_type 7864 (TYPE_MODE (inner_type), 7865 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type); 7866 7867 emit_move_insn (target, op0); 7868 op0 = target; 7869 } 7870 7871 /* At this point, OP0 is in the correct mode. If the output type is such 7872 that the operand is known to be aligned, indicate that it is. 7873 Otherwise, we need only be concerned about alignment for non-BLKmode 7874 results. */ 7875 if (MEM_P (op0)) 7876 { 7877 op0 = copy_rtx (op0); 7878 7879 if (TYPE_ALIGN_OK (type)) 7880 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type))); 7881 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT 7882 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) 7883 { 7884 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 7885 HOST_WIDE_INT temp_size 7886 = MAX (int_size_in_bytes (inner_type), 7887 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type))); 7888 rtx new = assign_stack_temp_for_type (TYPE_MODE (type), 7889 temp_size, 0, type); 7890 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0); 7891 7892 gcc_assert (!TREE_ADDRESSABLE (exp)); 7893 7894 if (GET_MODE (op0) == BLKmode) 7895 emit_block_move (new_with_op0_mode, op0, 7896 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))), 7897 (modifier == EXPAND_STACK_PARM 7898 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); 7899 else 7900 emit_move_insn (new_with_op0_mode, op0); 7901 7902 op0 = new; 7903 } 7904 7905 op0 = adjust_address (op0, TYPE_MODE (type), 0); 7906 } 7907 7908 return op0; 7909 7910 case PLUS_EXPR: 7911 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and 7912 something else, make sure we add the register to the constant and 7913 then to the other thing. This case can occur during strength 7914 reduction and doing it this way will produce better code if the 7915 frame pointer or argument pointer is eliminated. 7916 7917 fold-const.c will ensure that the constant is always in the inner 7918 PLUS_EXPR, so the only case we need to do anything about is if 7919 sp, ap, or fp is our second argument, in which case we must swap 7920 the innermost first argument and our second argument. */ 7921 7922 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR 7923 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST 7924 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL 7925 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx 7926 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx 7927 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx)) 7928 { 7929 tree t = TREE_OPERAND (exp, 1); 7930 7931 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); 7932 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t; 7933 } 7934 7935 /* If the result is to be ptr_mode and we are adding an integer to 7936 something, we might be forming a constant. So try to use 7937 plus_constant. If it produces a sum and we can't accept it, 7938 use force_operand. This allows P = &ARR[const] to generate 7939 efficient code on machines where a SYMBOL_REF is not a valid 7940 address. 7941 7942 If this is an EXPAND_SUM call, always return the sum. */ 7943 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER 7944 || (mode == ptr_mode && (unsignedp || ! 
flag_trapv))) 7945 { 7946 if (modifier == EXPAND_STACK_PARM) 7947 target = 0; 7948 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST 7949 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 7950 && TREE_CONSTANT (TREE_OPERAND (exp, 1))) 7951 { 7952 rtx constant_part; 7953 7954 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, 7955 EXPAND_SUM); 7956 /* Use immed_double_const to ensure that the constant is 7957 truncated according to the mode of OP1, then sign extended 7958 to a HOST_WIDE_INT. Using the constant directly can result 7959 in non-canonical RTL in a 64x32 cross compile. */ 7960 constant_part 7961 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)), 7962 (HOST_WIDE_INT) 0, 7963 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))); 7964 op1 = plus_constant (op1, INTVAL (constant_part)); 7965 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 7966 op1 = force_operand (op1, target); 7967 return REDUCE_BIT_FIELD (op1); 7968 } 7969 7970 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST 7971 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 7972 && TREE_CONSTANT (TREE_OPERAND (exp, 0))) 7973 { 7974 rtx constant_part; 7975 7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 7977 (modifier == EXPAND_INITIALIZER 7978 ? EXPAND_INITIALIZER : EXPAND_SUM)); 7979 if (! CONSTANT_P (op0)) 7980 { 7981 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, 7982 VOIDmode, modifier); 7983 /* Return a PLUS if modifier says it's OK. */ 7984 if (modifier == EXPAND_SUM 7985 || modifier == EXPAND_INITIALIZER) 7986 return simplify_gen_binary (PLUS, mode, op0, op1); 7987 goto binop2; 7988 } 7989 /* Use immed_double_const to ensure that the constant is 7990 truncated according to the mode of OP1, then sign extended 7991 to a HOST_WIDE_INT. Using the constant directly can result 7992 in non-canonical RTL in a 64x32 cross compile. */ 7993 constant_part 7994 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)), 7995 (HOST_WIDE_INT) 0, 7996 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))); 7997 op0 = plus_constant (op0, INTVAL (constant_part)); 7998 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 7999 op0 = force_operand (op0, target); 8000 return REDUCE_BIT_FIELD (op0); 8001 } 8002 } 8003 8004 /* No sense saving up arithmetic to be done 8005 if it's all in the wrong mode to form part of an address. 8006 And force_operand won't know whether to sign-extend or 8007 zero-extend. */ 8008 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 8009 || mode != ptr_mode) 8010 { 8011 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8012 subtarget, &op0, &op1, 0); 8013 if (op0 == const0_rtx) 8014 return op1; 8015 if (op1 == const0_rtx) 8016 return op0; 8017 goto binop2; 8018 } 8019 8020 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8021 subtarget, &op0, &op1, modifier); 8022 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); 8023 8024 case MINUS_EXPR: 8025 /* For initializers, we are allowed to return a MINUS of two 8026 symbolic constants. Here we handle all cases when both operands 8027 are constant. */ 8028 /* Handle difference of two symbolic constants, 8029 for the sake of an initializer. 
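   (One illustrative source of such differences is the GNU
   computed-goto extension, e.g.

     static const int offs[] = { &&l1 - &&l0, &&l2 - &&l0 };

   whose MINUS_EXPRs of label addresses must expand to assembly-time
   differences rather than to run-time arithmetic.)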
*/ 8030 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) 8031 && really_constant_p (TREE_OPERAND (exp, 0)) 8032 && really_constant_p (TREE_OPERAND (exp, 1))) 8033 { 8034 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8035 NULL_RTX, &op0, &op1, modifier); 8036 8037 /* If the last operand is a CONST_INT, use plus_constant of 8038 the negated constant. Else make the MINUS. */ 8039 if (GET_CODE (op1) == CONST_INT) 8040 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1))); 8041 else 8042 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1)); 8043 } 8044 8045 /* No sense saving up arithmetic to be done 8046 if it's all in the wrong mode to form part of an address. 8047 And force_operand won't know whether to sign-extend or 8048 zero-extend. */ 8049 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 8050 || mode != ptr_mode) 8051 goto binop; 8052 8053 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8054 subtarget, &op0, &op1, modifier); 8055 8056 /* Convert A - const to A + (-const). */ 8057 if (GET_CODE (op1) == CONST_INT) 8058 { 8059 op1 = negate_rtx (mode, op1); 8060 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); 8061 } 8062 8063 goto binop2; 8064 8065 case MULT_EXPR: 8066 /* If first operand is constant, swap them. 8067 Thus the following special case checks need only 8068 check the second operand. */ 8069 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) 8070 { 8071 tree t1 = TREE_OPERAND (exp, 0); 8072 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1); 8073 TREE_OPERAND (exp, 1) = t1; 8074 } 8075 8076 /* Attempt to return something suitable for generating an 8077 indexed address, for machines that support that. */ 8078 8079 if (modifier == EXPAND_SUM && mode == ptr_mode 8080 && host_integerp (TREE_OPERAND (exp, 1), 0)) 8081 { 8082 tree exp1 = TREE_OPERAND (exp, 1); 8083 8084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 8085 EXPAND_SUM); 8086 8087 if (!REG_P (op0)) 8088 op0 = force_operand (op0, NULL_RTX); 8089 if (!REG_P (op0)) 8090 op0 = copy_to_mode_reg (mode, op0); 8091 8092 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0, 8093 gen_int_mode (tree_low_cst (exp1, 0), 8094 TYPE_MODE (TREE_TYPE (exp1))))); 8095 } 8096 8097 if (modifier == EXPAND_STACK_PARM) 8098 target = 0; 8099 8100 /* Check for multiplying things that have been extended 8101 from a narrower type. If this machine supports multiplying 8102 in that narrower type with a result in the desired type, 8103 do it that way, and avoid the explicit type-conversion. */ 8104 8105 subexp0 = TREE_OPERAND (exp, 0); 8106 subexp1 = TREE_OPERAND (exp, 1); 8107 /* First, check if we have a multiplication of one signed and one 8108 unsigned operand. 
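   That is (editor's example), source like

     int i; unsigned int u;
     long long p = (long long) i * u;

   can use a signed-by-unsigned widening multiply directly, via
   usmul_widen_optab (e.g. a usmulsidi3 pattern), instead of first
   extending both operands to the double-width mode.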
*/ 8109 if (TREE_CODE (subexp0) == NOP_EXPR 8110 && TREE_CODE (subexp1) == NOP_EXPR 8111 && TREE_CODE (type) == INTEGER_TYPE 8112 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) 8113 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) 8114 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) 8115 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0)))) 8116 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))) 8117 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0))))) 8118 { 8119 enum machine_mode innermode 8120 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0))); 8121 this_optab = usmul_widen_optab; 8122 if (mode == GET_MODE_WIDER_MODE (innermode)) 8123 { 8124 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) 8125 { 8126 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))) 8127 expand_operands (TREE_OPERAND (subexp0, 0), 8128 TREE_OPERAND (subexp1, 0), 8129 NULL_RTX, &op0, &op1, 0); 8130 else 8131 expand_operands (TREE_OPERAND (subexp0, 0), 8132 TREE_OPERAND (subexp1, 0), 8133 NULL_RTX, &op1, &op0, 0); 8134 8135 goto binop3; 8136 } 8137 } 8138 } 8139 /* Check for a multiplication with matching signedness. */ 8140 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR 8141 && TREE_CODE (type) == INTEGER_TYPE 8142 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) 8143 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) 8144 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST 8145 && int_fits_type_p (TREE_OPERAND (exp, 1), 8146 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) 8147 /* Don't use a widening multiply if a shift will do. */ 8148 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))) 8149 > HOST_BITS_PER_WIDE_INT) 8150 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0)) 8151 || 8152 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR 8153 && (TYPE_PRECISION (TREE_TYPE 8154 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) 8155 == TYPE_PRECISION (TREE_TYPE 8156 (TREE_OPERAND 8157 (TREE_OPERAND (exp, 0), 0)))) 8158 /* If both operands are extended, they must either both 8159 be zero-extended or both be sign-extended. */ 8160 && (TYPE_UNSIGNED (TREE_TYPE 8161 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) 8162 == TYPE_UNSIGNED (TREE_TYPE 8163 (TREE_OPERAND 8164 (TREE_OPERAND (exp, 0), 0))))))) 8165 { 8166 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); 8167 enum machine_mode innermode = TYPE_MODE (op0type); 8168 bool zextend_p = TYPE_UNSIGNED (op0type); 8169 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab; 8170 this_optab = zextend_p ? 
umul_widen_optab : smul_widen_optab; 8171 8172 if (mode == GET_MODE_2XWIDER_MODE (innermode)) 8173 { 8174 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) 8175 { 8176 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) 8177 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), 8178 TREE_OPERAND (exp, 1), 8179 NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8180 else 8181 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), 8182 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 8183 NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8184 goto binop3; 8185 } 8186 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing 8187 && innermode == word_mode) 8188 { 8189 rtx htem, hipart; 8190 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); 8191 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) 8192 op1 = convert_modes (innermode, mode, 8193 expand_normal (TREE_OPERAND (exp, 1)), 8194 unsignedp); 8195 else 8196 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)); 8197 temp = expand_binop (mode, other_optab, op0, op1, target, 8198 unsignedp, OPTAB_LIB_WIDEN); 8199 hipart = gen_highpart (innermode, temp); 8200 htem = expand_mult_highpart_adjust (innermode, hipart, 8201 op0, op1, hipart, 8202 zextend_p); 8203 if (htem != hipart) 8204 emit_move_insn (hipart, htem); 8205 return REDUCE_BIT_FIELD (temp); 8206 } 8207 } 8208 } 8209 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8210 subtarget, &op0, &op1, 0); 8211 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); 8212 8213 case TRUNC_DIV_EXPR: 8214 case FLOOR_DIV_EXPR: 8215 case CEIL_DIV_EXPR: 8216 case ROUND_DIV_EXPR: 8217 case EXACT_DIV_EXPR: 8218 if (modifier == EXPAND_STACK_PARM) 8219 target = 0; 8220 /* Possible optimization: compute the dividend with EXPAND_SUM 8221 then if the divisor is constant can optimize the case 8222 where some terms of the dividend have coeffs divisible by it. */ 8223 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8224 subtarget, &op0, &op1, 0); 8225 return expand_divmod (0, code, mode, op0, op1, target, unsignedp); 8226 8227 case RDIV_EXPR: 8228 goto binop; 8229 8230 case TRUNC_MOD_EXPR: 8231 case FLOOR_MOD_EXPR: 8232 case CEIL_MOD_EXPR: 8233 case ROUND_MOD_EXPR: 8234 if (modifier == EXPAND_STACK_PARM) 8235 target = 0; 8236 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8237 subtarget, &op0, &op1, 0); 8238 return expand_divmod (1, code, mode, op0, op1, target, unsignedp); 8239 8240 case FIX_ROUND_EXPR: 8241 case FIX_FLOOR_EXPR: 8242 case FIX_CEIL_EXPR: 8243 gcc_unreachable (); /* Not used for C. */ 8244 8245 case FIX_TRUNC_EXPR: 8246 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8247 if (target == 0 || modifier == EXPAND_STACK_PARM) 8248 target = gen_reg_rtx (mode); 8249 expand_fix (target, op0, unsignedp); 8250 return target; 8251 8252 case FLOAT_EXPR: 8253 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8254 if (target == 0 || modifier == EXPAND_STACK_PARM) 8255 target = gen_reg_rtx (mode); 8256 /* expand_float can't figure out what to do if FROM has VOIDmode. 8257 So give it the correct mode. With -O, cse will optimize this. 
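   (A CONST_INT, for example, is modeless: if op0 expands to
   (const_int 42), copying it into a register of the source type's
   mode tells expand_float whether it is converting from SImode,
   DImode, etc.)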
*/ 8258 if (GET_MODE (op0) == VOIDmode) 8259 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), 8260 op0); 8261 expand_float (target, op0, 8262 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); 8263 return target; 8264 8265 case NEGATE_EXPR: 8266 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8267 if (modifier == EXPAND_STACK_PARM) 8268 target = 0; 8269 temp = expand_unop (mode, 8270 optab_for_tree_code (NEGATE_EXPR, type), 8271 op0, target, 0); 8272 gcc_assert (temp); 8273 return REDUCE_BIT_FIELD (temp); 8274 8275 case ABS_EXPR: 8276 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8277 if (modifier == EXPAND_STACK_PARM) 8278 target = 0; 8279 8280 /* ABS_EXPR is not valid for complex arguments. */ 8281 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT 8282 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); 8283 8284 /* Unsigned abs is simply the operand. Testing here means we don't 8285 risk generating incorrect code below. */ 8286 if (TYPE_UNSIGNED (type)) 8287 return op0; 8288 8289 return expand_abs (mode, op0, target, unsignedp, 8290 safe_from_p (target, TREE_OPERAND (exp, 0), 1)); 8291 8292 case MAX_EXPR: 8293 case MIN_EXPR: 8294 target = original_target; 8295 if (target == 0 8296 || modifier == EXPAND_STACK_PARM 8297 || (MEM_P (target) && MEM_VOLATILE_P (target)) 8298 || GET_MODE (target) != mode 8299 || (REG_P (target) 8300 && REGNO (target) < FIRST_PSEUDO_REGISTER)) 8301 target = gen_reg_rtx (mode); 8302 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8303 target, &op0, &op1, 0); 8304 8305 /* First try to do it with a special MIN or MAX instruction. 8306 If that does not win, use a conditional jump to select the proper 8307 value. */ 8308 this_optab = optab_for_tree_code (code, type); 8309 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, 8310 OPTAB_WIDEN); 8311 if (temp != 0) 8312 return temp; 8313 8314 /* At this point, a MEM target is no longer useful; we will get better 8315 code without it. */ 8316 8317 if (! REG_P (target)) 8318 target = gen_reg_rtx (mode); 8319 8320 /* If op1 was placed in target, swap op0 and op1. */ 8321 if (target != op0 && target == op1) 8322 { 8323 temp = op0; 8324 op0 = op1; 8325 op1 = temp; 8326 } 8327 8328 /* We generate better code and avoid problems with op1 mentioning 8329 target by forcing op1 into a pseudo if it isn't a constant. */ 8330 if (! CONSTANT_P (op1)) 8331 op1 = force_reg (mode, op1); 8332 8333 { 8334 enum rtx_code comparison_code; 8335 rtx cmpop1 = op1; 8336 8337 if (code == MAX_EXPR) 8338 comparison_code = unsignedp ? GEU : GE; 8339 else 8340 comparison_code = unsignedp ? LEU : LE; 8341 8342 /* Canonicalize to comparisons against 0. */ 8343 if (op1 == const1_rtx) 8344 { 8345 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1) 8346 or (a != 0 ? a : 1) for unsigned. 8347 For MIN we are safe converting (a <= 1 ? a : 1) 8348 into (a <= 0 ? a : 1) */ 8349 cmpop1 = const0_rtx; 8350 if (code == MAX_EXPR) 8351 comparison_code = unsignedp ? NE : GT; 8352 } 8353 if (op1 == constm1_rtx && !unsignedp) 8354 { 8355 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1) 8356 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */ 8357 cmpop1 = const0_rtx; 8358 if (code == MIN_EXPR) 8359 comparison_code = LT; 8360 } 8361#ifdef HAVE_conditional_move 8362 /* Use a conditional move if possible. */ 8363 if (can_conditionally_move_p (mode)) 8364 { 8365 rtx insn; 8366 8367 /* ??? 
Same problem as in expmed.c: emit_conditional_move 8368 forces a stack adjustment via compare_from_rtx, and we 8369 lose the stack adjustment if the sequence we are about 8370 to create is discarded. */ 8371 do_pending_stack_adjust (); 8372 8373 start_sequence (); 8374 8375 /* Try to emit the conditional move. */ 8376 insn = emit_conditional_move (target, comparison_code, 8377 op0, cmpop1, mode, 8378 op0, op1, mode, 8379 unsignedp); 8380 8381 /* If we could do the conditional move, emit the sequence, 8382 and return. */ 8383 if (insn) 8384 { 8385 rtx seq = get_insns (); 8386 end_sequence (); 8387 emit_insn (seq); 8388 return target; 8389 } 8390 8391 /* Otherwise discard the sequence and fall back to code with 8392 branches. */ 8393 end_sequence (); 8394 } 8395#endif 8396 if (target != op0) 8397 emit_move_insn (target, op0); 8398 8399 temp = gen_label_rtx (); 8400 do_compare_rtx_and_jump (target, cmpop1, comparison_code, 8401 unsignedp, mode, NULL_RTX, NULL_RTX, temp); 8402 } 8403 emit_move_insn (target, op1); 8404 emit_label (temp); 8405 return target; 8406 8407 case BIT_NOT_EXPR: 8408 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8409 if (modifier == EXPAND_STACK_PARM) 8410 target = 0; 8411 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); 8412 gcc_assert (temp); 8413 return temp; 8414 8415 /* ??? Can optimize bitwise operations with one arg constant. 8416 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) 8417 and (a bitwise1 b) bitwise2 b (etc) 8418 but that is probably not worth while. */ 8419 8420 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two 8421 boolean values when we want in all cases to compute both of them. In 8422 general it is fastest to do TRUTH_AND_EXPR by computing both operands 8423 as actual zero-or-1 values and then bitwise anding. In cases where 8424 there cannot be any side effects, better code would be made by 8425 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is 8426 how to recognize those cases. */ 8427 8428 case TRUTH_AND_EXPR: 8429 code = BIT_AND_EXPR; 8430 case BIT_AND_EXPR: 8431 goto binop; 8432 8433 case TRUTH_OR_EXPR: 8434 code = BIT_IOR_EXPR; 8435 case BIT_IOR_EXPR: 8436 goto binop; 8437 8438 case TRUTH_XOR_EXPR: 8439 code = BIT_XOR_EXPR; 8440 case BIT_XOR_EXPR: 8441 goto binop; 8442 8443 case LSHIFT_EXPR: 8444 case RSHIFT_EXPR: 8445 case LROTATE_EXPR: 8446 case RROTATE_EXPR: 8447 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) 8448 subtarget = 0; 8449 if (modifier == EXPAND_STACK_PARM) 8450 target = 0; 8451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8452 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target, 8453 unsignedp); 8454 8455 /* Could determine the answer when only additive constants differ. Also, 8456 the addition of one can be handled by changing the condition. */ 8457 case LT_EXPR: 8458 case LE_EXPR: 8459 case GT_EXPR: 8460 case GE_EXPR: 8461 case EQ_EXPR: 8462 case NE_EXPR: 8463 case UNORDERED_EXPR: 8464 case ORDERED_EXPR: 8465 case UNLT_EXPR: 8466 case UNLE_EXPR: 8467 case UNGT_EXPR: 8468 case UNGE_EXPR: 8469 case UNEQ_EXPR: 8470 case LTGT_EXPR: 8471 temp = do_store_flag (exp, 8472 modifier != EXPAND_STACK_PARM ? target : NULL_RTX, 8473 tmode != VOIDmode ? tmode : mode, 0); 8474 if (temp != 0) 8475 return temp; 8476 8477 /* For foo != 0, load foo, and if it is nonzero load 1 instead. 
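   That is, for something like r = (foo != 0) we emit (a sketch of
   the insns generated below):

     temp = foo;
     if (temp == 0) goto L;
     temp = 1;
   L:

   which needs no store-flag (scc) instruction.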
*/ 8478 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1)) 8479 && original_target 8480 && REG_P (original_target) 8481 && (GET_MODE (original_target) 8482 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) 8483 { 8484 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, 8485 VOIDmode, 0); 8486 8487 /* If temp is constant, we can just compute the result. */ 8488 if (GET_CODE (temp) == CONST_INT) 8489 { 8490 if (INTVAL (temp) != 0) 8491 emit_move_insn (target, const1_rtx); 8492 else 8493 emit_move_insn (target, const0_rtx); 8494 8495 return target; 8496 } 8497 8498 if (temp != original_target) 8499 { 8500 enum machine_mode mode1 = GET_MODE (temp); 8501 if (mode1 == VOIDmode) 8502 mode1 = tmode != VOIDmode ? tmode : mode; 8503 8504 temp = copy_to_mode_reg (mode1, temp); 8505 } 8506 8507 op1 = gen_label_rtx (); 8508 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX, 8509 GET_MODE (temp), unsignedp, op1); 8510 emit_move_insn (temp, const1_rtx); 8511 emit_label (op1); 8512 return temp; 8513 } 8514 8515 /* If no set-flag instruction, must generate a conditional store 8516 into a temporary variable. Drop through and handle this 8517 like && and ||. */ 8518 8519 if (! ignore 8520 && (target == 0 8521 || modifier == EXPAND_STACK_PARM 8522 || ! safe_from_p (target, exp, 1) 8523 /* Make sure we don't have a hard reg (such as function's return 8524 value) live across basic blocks, if not optimizing. */ 8525 || (!optimize && REG_P (target) 8526 && REGNO (target) < FIRST_PSEUDO_REGISTER))) 8527 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); 8528 8529 if (target) 8530 emit_move_insn (target, const0_rtx); 8531 8532 op1 = gen_label_rtx (); 8533 jumpifnot (exp, op1); 8534 8535 if (target) 8536 emit_move_insn (target, const1_rtx); 8537 8538 emit_label (op1); 8539 return ignore ? const0_rtx : target; 8540 8541 case TRUTH_NOT_EXPR: 8542 if (modifier == EXPAND_STACK_PARM) 8543 target = 0; 8544 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); 8545 /* The parser is careful to generate TRUTH_NOT_EXPR 8546 only with operands that are always zero or one. */ 8547 temp = expand_binop (mode, xor_optab, op0, const1_rtx, 8548 target, 1, OPTAB_LIB_WIDEN); 8549 gcc_assert (temp); 8550 return temp; 8551 8552 case STATEMENT_LIST: 8553 { 8554 tree_stmt_iterator iter; 8555 8556 gcc_assert (ignore); 8557 8558 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter)) 8559 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier); 8560 } 8561 return const0_rtx; 8562 8563 case COND_EXPR: 8564 /* A COND_EXPR with its type being VOID_TYPE represents a 8565 conditional jump and is handled in 8566 expand_gimple_cond_expr. */ 8567 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp))); 8568 8569 /* Note that COND_EXPRs whose type is a structure or union 8570 are required to be constructed to contain assignments of 8571 a temporary variable, so that we can evaluate them here 8572 for side effect only. If type is void, we must do likewise. */ 8573 8574 gcc_assert (!TREE_ADDRESSABLE (type) 8575 && !ignore 8576 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node 8577 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node); 8578 8579 /* If we are not to produce a result, we have no target. Otherwise, 8580 if a target was specified use it; it will not be used as an 8581 intermediate target unless it is safe. If no target, use a 8582 temporary. 
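   The expansion below then has the shape (editorial sketch):

     if (!cond) goto op0;
     temp = then-value;
     goto op1;
   op0:
     temp = else-value;
   op1:
     ... use temp ...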
*/ 8583 8584 if (modifier != EXPAND_STACK_PARM 8585 && original_target 8586 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1) 8587 && GET_MODE (original_target) == mode 8588#ifdef HAVE_conditional_move 8589 && (! can_conditionally_move_p (mode) 8590 || REG_P (original_target)) 8591#endif 8592 && !MEM_P (original_target)) 8593 temp = original_target; 8594 else 8595 temp = assign_temp (type, 0, 0, 1); 8596 8597 do_pending_stack_adjust (); 8598 NO_DEFER_POP; 8599 op0 = gen_label_rtx (); 8600 op1 = gen_label_rtx (); 8601 jumpifnot (TREE_OPERAND (exp, 0), op0); 8602 store_expr (TREE_OPERAND (exp, 1), temp, 8603 modifier == EXPAND_STACK_PARM); 8604 8605 emit_jump_insn (gen_jump (op1)); 8606 emit_barrier (); 8607 emit_label (op0); 8608 store_expr (TREE_OPERAND (exp, 2), temp, 8609 modifier == EXPAND_STACK_PARM); 8610 8611 emit_label (op1); 8612 OK_DEFER_POP; 8613 return temp; 8614 8615 case VEC_COND_EXPR: 8616 target = expand_vec_cond_expr (exp, target); 8617 return target; 8618 8619 case MODIFY_EXPR: 8620 { 8621 tree lhs = TREE_OPERAND (exp, 0); 8622 tree rhs = TREE_OPERAND (exp, 1); 8623 8624 gcc_assert (ignore); 8625 8626 /* Check for |= or &= of a bitfield of size one into another bitfield 8627 of size 1. In this case, (unless we need the result of the 8628 assignment) we can do this more efficiently with a 8629 test followed by an assignment, if necessary. 8630 8631 ??? At this point, we can't get a BIT_FIELD_REF here. But if 8632 things change so we do, this code should be enhanced to 8633 support it. */ 8634 if (TREE_CODE (lhs) == COMPONENT_REF 8635 && (TREE_CODE (rhs) == BIT_IOR_EXPR 8636 || TREE_CODE (rhs) == BIT_AND_EXPR) 8637 && TREE_OPERAND (rhs, 0) == lhs 8638 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF 8639 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) 8640 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) 8641 { 8642 rtx label = gen_label_rtx (); 8643 int value = TREE_CODE (rhs) == BIT_IOR_EXPR; 8644 do_jump (TREE_OPERAND (rhs, 1), 8645 value ? label : 0, 8646 value ? 0 : label); 8647 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value)); 8648 do_pending_stack_adjust (); 8649 emit_label (label); 8650 return const0_rtx; 8651 } 8652 8653 expand_assignment (lhs, rhs); 8654 8655 return const0_rtx; 8656 } 8657 8658 case RETURN_EXPR: 8659 if (!TREE_OPERAND (exp, 0)) 8660 expand_null_return (); 8661 else 8662 expand_return (TREE_OPERAND (exp, 0)); 8663 return const0_rtx; 8664 8665 case ADDR_EXPR: 8666 return expand_expr_addr_expr (exp, target, tmode, modifier); 8667 8668 case COMPLEX_EXPR: 8669 /* Get the rtx code of the operands. */ 8670 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8671 op1 = expand_normal (TREE_OPERAND (exp, 1)); 8672 8673 if (!target) 8674 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); 8675 8676 /* Move the real (op0) and imaginary (op1) parts to their location. */ 8677 write_complex_part (target, op0, false); 8678 write_complex_part (target, op1, true); 8679 8680 return target; 8681 8682 case REALPART_EXPR: 8683 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8684 return read_complex_part (op0, false); 8685 8686 case IMAGPART_EXPR: 8687 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8688 return read_complex_part (op0, true); 8689 8690 case RESX_EXPR: 8691 expand_resx_expr (exp); 8692 return const0_rtx; 8693 8694 case TRY_CATCH_EXPR: 8695 case CATCH_EXPR: 8696 case EH_FILTER_EXPR: 8697 case TRY_FINALLY_EXPR: 8698 /* Lowered by tree-eh.c. 
*/ 8699 gcc_unreachable (); 8700 8701 case WITH_CLEANUP_EXPR: 8702 case CLEANUP_POINT_EXPR: 8703 case TARGET_EXPR: 8704 case CASE_LABEL_EXPR: 8705 case VA_ARG_EXPR: 8706 case BIND_EXPR: 8707 case INIT_EXPR: 8708 case CONJ_EXPR: 8709 case COMPOUND_EXPR: 8710 case PREINCREMENT_EXPR: 8711 case PREDECREMENT_EXPR: 8712 case POSTINCREMENT_EXPR: 8713 case POSTDECREMENT_EXPR: 8714 case LOOP_EXPR: 8715 case EXIT_EXPR: 8716 case TRUTH_ANDIF_EXPR: 8717 case TRUTH_ORIF_EXPR: 8718 /* Lowered by gimplify.c. */ 8719 gcc_unreachable (); 8720 8721 case EXC_PTR_EXPR: 8722 return get_exception_pointer (cfun); 8723 8724 case FILTER_EXPR: 8725 return get_exception_filter (cfun); 8726 8727 case FDESC_EXPR: 8728 /* Function descriptors are not valid except for as 8729 initialization constants, and should not be expanded. */ 8730 gcc_unreachable (); 8731 8732 case SWITCH_EXPR: 8733 expand_case (exp); 8734 return const0_rtx; 8735 8736 case LABEL_EXPR: 8737 expand_label (TREE_OPERAND (exp, 0)); 8738 return const0_rtx; 8739 8740 case ASM_EXPR: 8741 expand_asm_expr (exp); 8742 return const0_rtx; 8743 8744 case WITH_SIZE_EXPR: 8745 /* WITH_SIZE_EXPR expands to its first argument. The caller should 8746 have pulled out the size to use in whatever context it needed. */ 8747 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode, 8748 modifier, alt_rtl); 8749 8750 case REALIGN_LOAD_EXPR: 8751 { 8752 tree oprnd0 = TREE_OPERAND (exp, 0); 8753 tree oprnd1 = TREE_OPERAND (exp, 1); 8754 tree oprnd2 = TREE_OPERAND (exp, 2); 8755 rtx op2; 8756 8757 this_optab = optab_for_tree_code (code, type); 8758 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8759 op2 = expand_normal (oprnd2); 8760 temp = expand_ternary_op (mode, this_optab, op0, op1, op2, 8761 target, unsignedp); 8762 gcc_assert (temp); 8763 return temp; 8764 } 8765 8766 case DOT_PROD_EXPR: 8767 { 8768 tree oprnd0 = TREE_OPERAND (exp, 0); 8769 tree oprnd1 = TREE_OPERAND (exp, 1); 8770 tree oprnd2 = TREE_OPERAND (exp, 2); 8771 rtx op2; 8772 8773 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8774 op2 = expand_normal (oprnd2); 8775 target = expand_widen_pattern_expr (exp, op0, op1, op2, 8776 target, unsignedp); 8777 return target; 8778 } 8779 8780 case WIDEN_SUM_EXPR: 8781 { 8782 tree oprnd0 = TREE_OPERAND (exp, 0); 8783 tree oprnd1 = TREE_OPERAND (exp, 1); 8784 8785 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0); 8786 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1, 8787 target, unsignedp); 8788 return target; 8789 } 8790 8791 case REDUC_MAX_EXPR: 8792 case REDUC_MIN_EXPR: 8793 case REDUC_PLUS_EXPR: 8794 { 8795 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8796 this_optab = optab_for_tree_code (code, type); 8797 temp = expand_unop (mode, this_optab, op0, target, unsignedp); 8798 gcc_assert (temp); 8799 return temp; 8800 } 8801 8802 case VEC_LSHIFT_EXPR: 8803 case VEC_RSHIFT_EXPR: 8804 { 8805 target = expand_vec_shift_expr (exp, target); 8806 return target; 8807 } 8808 8809 default: 8810 return lang_hooks.expand_expr (exp, original_target, tmode, 8811 modifier, alt_rtl); 8812 } 8813 8814 /* Here to do an ordinary binary operator. 
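   (Editorial note on the three entry points: binop expands both
   operands and falls through; binop2 is entered with op0/op1
   already expanded but no optab chosen yet; binop3 is entered with
   op0, op1 and this_optab all set up, as in the widening-multiply
   cases above.)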
*/ 8815 binop: 8816 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8817 subtarget, &op0, &op1, 0); 8818 binop2: 8819 this_optab = optab_for_tree_code (code, type); 8820 binop3: 8821 if (modifier == EXPAND_STACK_PARM) 8822 target = 0; 8823 temp = expand_binop (mode, this_optab, op0, op1, target, 8824 unsignedp, OPTAB_LIB_WIDEN); 8825 gcc_assert (temp); 8826 return REDUCE_BIT_FIELD (temp); 8827} 8828#undef REDUCE_BIT_FIELD 8829 8830/* Subroutine of above: reduce EXP to the precision of TYPE (in the 8831 signedness of TYPE), possibly returning the result in TARGET. */ 8832static rtx 8833reduce_to_bit_field_precision (rtx exp, rtx target, tree type) 8834{ 8835 HOST_WIDE_INT prec = TYPE_PRECISION (type); 8836 if (target && GET_MODE (target) != GET_MODE (exp)) 8837 target = 0; 8838 /* For constant values, reduce using build_int_cst_type. */ 8839 if (GET_CODE (exp) == CONST_INT) 8840 { 8841 HOST_WIDE_INT value = INTVAL (exp); 8842 tree t = build_int_cst_type (type, value); 8843 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL); 8844 } 8845 else if (TYPE_UNSIGNED (type)) 8846 { 8847 rtx mask; 8848 if (prec < HOST_BITS_PER_WIDE_INT) 8849 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0, 8850 GET_MODE (exp)); 8851 else 8852 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1, 8853 ((unsigned HOST_WIDE_INT) 1 8854 << (prec - HOST_BITS_PER_WIDE_INT)) - 1, 8855 GET_MODE (exp)); 8856 return expand_and (GET_MODE (exp), exp, mask, target); 8857 } 8858 else 8859 { 8860 tree count = build_int_cst (NULL_TREE, 8861 GET_MODE_BITSIZE (GET_MODE (exp)) - prec); 8862 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); 8863 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); 8864 } 8865} 8866 8867/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that 8868 when applied to the address of EXP produces an address known to be 8869 aligned more than BIGGEST_ALIGNMENT. */ 8870 8871static int 8872is_aligning_offset (tree offset, tree exp) 8873{ 8874 /* Strip off any conversions. */ 8875 while (TREE_CODE (offset) == NON_LVALUE_EXPR 8876 || TREE_CODE (offset) == NOP_EXPR 8877 || TREE_CODE (offset) == CONVERT_EXPR) 8878 offset = TREE_OPERAND (offset, 0); 8879 8880 /* We must now have a BIT_AND_EXPR with a constant that is one less than 8881 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */ 8882 if (TREE_CODE (offset) != BIT_AND_EXPR 8883 || !host_integerp (TREE_OPERAND (offset, 1), 1) 8884 || compare_tree_int (TREE_OPERAND (offset, 1), 8885 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0 8886 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0) 8887 return 0; 8888 8889 /* Look at the first operand of BIT_AND_EXPR and strip any conversion. 8890 It must be NEGATE_EXPR. Then strip any more conversions. */ 8891 offset = TREE_OPERAND (offset, 0); 8892 while (TREE_CODE (offset) == NON_LVALUE_EXPR 8893 || TREE_CODE (offset) == NOP_EXPR 8894 || TREE_CODE (offset) == CONVERT_EXPR) 8895 offset = TREE_OPERAND (offset, 0); 8896 8897 if (TREE_CODE (offset) != NEGATE_EXPR) 8898 return 0; 8899 8900 offset = TREE_OPERAND (offset, 0); 8901 while (TREE_CODE (offset) == NON_LVALUE_EXPR 8902 || TREE_CODE (offset) == NOP_EXPR 8903 || TREE_CODE (offset) == CONVERT_EXPR) 8904 offset = TREE_OPERAND (offset, 0); 8905 8906 /* This must now be the address of EXP.
*/ 8907 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp; 8908} 8909 8910/* Return the tree node if an ARG corresponds to a string constant or zero 8911 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset 8912 in bytes within the string that ARG is accessing. The type of the 8913 offset will be `sizetype'. */ 8914 8915tree 8916string_constant (tree arg, tree *ptr_offset) 8917{ 8918 tree array, offset; 8919 STRIP_NOPS (arg); 8920 8921 if (TREE_CODE (arg) == ADDR_EXPR) 8922 { 8923 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) 8924 { 8925 *ptr_offset = size_zero_node; 8926 return TREE_OPERAND (arg, 0); 8927 } 8928 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL) 8929 { 8930 array = TREE_OPERAND (arg, 0); 8931 offset = size_zero_node; 8932 } 8933 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF) 8934 { 8935 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); 8936 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1); 8937 if (TREE_CODE (array) != STRING_CST 8938 && TREE_CODE (array) != VAR_DECL) 8939 return 0; 8940 } 8941 else 8942 return 0; 8943 } 8944 else if (TREE_CODE (arg) == PLUS_EXPR) 8945 { 8946 tree arg0 = TREE_OPERAND (arg, 0); 8947 tree arg1 = TREE_OPERAND (arg, 1); 8948 8949 STRIP_NOPS (arg0); 8950 STRIP_NOPS (arg1); 8951 8952 if (TREE_CODE (arg0) == ADDR_EXPR 8953 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST 8954 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL)) 8955 { 8956 array = TREE_OPERAND (arg0, 0); 8957 offset = arg1; 8958 } 8959 else if (TREE_CODE (arg1) == ADDR_EXPR 8960 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST 8961 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL)) 8962 { 8963 array = TREE_OPERAND (arg1, 0); 8964 offset = arg0; 8965 } 8966 else 8967 return 0; 8968 } 8969 else 8970 return 0; 8971 8972 if (TREE_CODE (array) == STRING_CST) 8973 { 8974 *ptr_offset = fold_convert (sizetype, offset); 8975 return array; 8976 } 8977 else if (TREE_CODE (array) == VAR_DECL) 8978 { 8979 int length; 8980 8981 /* Variables initialized to string literals can be handled too. */ 8982 if (DECL_INITIAL (array) == NULL_TREE 8983 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST) 8984 return 0; 8985 8986 /* If they are read-only, non-volatile and bind locally. */ 8987 if (! TREE_READONLY (array) 8988 || TREE_SIDE_EFFECTS (array) 8989 || ! targetm.binds_local_p (array)) 8990 return 0; 8991 8992 /* Avoid const char foo[4] = "abcde"; */ 8993 if (DECL_SIZE_UNIT (array) == NULL_TREE 8994 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST 8995 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0 8996 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0) 8997 return 0; 8998 8999 /* If variable is bigger than the string literal, OFFSET must be constant 9000 and inside of the bounds of the string literal. */ 9001 offset = fold_convert (sizetype, offset); 9002 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0 9003 && (! host_integerp (offset, 1) 9004 || compare_tree_int (offset, length) >= 0)) 9005 return 0; 9006 9007 *ptr_offset = offset; 9008 return DECL_INITIAL (array); 9009 } 9010 9011 return 0; 9012} 9013 9014/* Generate code to calculate EXP using a store-flag instruction 9015 and return an rtx for the result. EXP is either a comparison 9016 or a TRUTH_NOT_EXPR whose operand is a comparison. 9017 9018 If TARGET is nonzero, store the result there if convenient. 9019 9020 If ONLY_CHEAP is nonzero, only do this if it is likely to be very 9021 cheap. 
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
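  /* For instance, a signed X < 1 becomes X <= 0 below, an unsigned X < 1
     becomes X LEU 0 (i.e. X == 0), and a signed X <= -1 becomes X < 0,
     so the later special cases only need to recognize comparisons
     against zero.  */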
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  For example, (X & 8) != 0
     becomes (X >> 3) & 1, and (X & 8) == 0 becomes ((X >> 3) & 1) ^ 1.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
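  /* Concretely, the fallback for a non-inverted comparison expands to
     TARGET = 1; if (OP0 <code> OP1) goto L; TARGET = 0; L:, with the
     two constants swapped when INVERT is set.  */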
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_size (index_bits, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
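/* As an example, a switch whose case values span 3..10 reaches try_casesi
   with MINVAL 3 and RANGE 7; the generated casesi insn dispatches through
   TABLE_LABEL for index values 3..10 and branches to DEFAULT_LABEL for
   everything else.  */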
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
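  /* E.g. for case values 5..12, INDEX arrives here with 5 already
     subtracted, so the single unsigned test INDEX > 7 rejects original
     values below 5 (which wrap around to large unsigned numbers) as
     well as those above 12.  */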
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"