1/* Reload pseudo regs into hard regs for insns that require hard regs. 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 3 1998, 1999, 2000 Free Software Foundation, Inc. 4 5This file is part of GNU CC. 6 7GNU CC is free software; you can redistribute it and/or modify 8it under the terms of the GNU General Public License as published by 9the Free Software Foundation; either version 2, or (at your option) 10any later version. 11 12GNU CC is distributed in the hope that it will be useful, 13but WITHOUT ANY WARRANTY; without even the implied warranty of 14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15GNU General Public License for more details. 16 17You should have received a copy of the GNU General Public License 18along with GNU CC; see the file COPYING. If not, write to 19the Free Software Foundation, 59 Temple Place - Suite 330, 20Boston, MA 02111-1307, USA. */ 21 22 23#include "config.h" 24#include "system.h" 25 26#include "machmode.h" 27#include "hard-reg-set.h" 28#include "rtl.h" 29#include "obstack.h" 30#include "insn-config.h" 31#include "insn-flags.h" 32#include "insn-codes.h" 33#include "flags.h" 34#include "expr.h" 35#include "regs.h" 36#include "basic-block.h" 37#include "reload.h" 38#include "recog.h" 39#include "output.h" 40#include "real.h" 41#include "toplev.h" 42 43#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY 44#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY 45#endif 46 47/* This file contains the reload pass of the compiler, which is 48 run after register allocation has been done. It checks that 49 each insn is valid (operands required to be in registers really 50 are in registers of the proper class) and fixes up invalid ones 51 by copying values temporarily into registers for the insns 52 that need them. 
53 54 The results of register allocation are described by the vector 55 reg_renumber; the insns still contain pseudo regs, but reg_renumber 56 can be used to find which hard reg, if any, a pseudo reg is in. 57 58 The technique we always use is to free up a few hard regs that are 59 called ``reload regs'', and for each place where a pseudo reg 60 must be in a hard reg, copy it temporarily into one of the reload regs. 61 62 Reload regs are allocated locally for every instruction that needs 63 reloads. When there are pseudos which are allocated to a register that 64 has been chosen as a reload reg, such pseudos must be ``spilled''. 65 This means that they go to other hard regs, or to stack slots if no other 66 available hard regs can be found. Spilling can invalidate more 67 insns, requiring additional need for reloads, so we must keep checking 68 until the process stabilizes. 69 70 For machines with different classes of registers, we must keep track 71 of the register class needed for each reload, and make sure that 72 we allocate enough reload registers of each class. 73 74 The file reload.c contains the code that checks one insn for 75 validity and reports the reloads that it needs. This file 76 is in charge of scanning the entire rtl code, accumulating the 77 reload needs, spilling, assigning reload registers to use for 78 fixing up each insn, and generating the new insns to copy values 79 into the reload registers. */ 80 81 82#ifndef REGISTER_MOVE_COST 83#define REGISTER_MOVE_COST(x, y) 2 84#endif 85 86/* During reload_as_needed, element N contains a REG rtx for the hard reg 87 into which reg N has been reloaded (perhaps for a previous insn). */ 88static rtx *reg_last_reload_reg; 89 90/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn 91 for an output reload that stores into reg N. */ 92static char *reg_has_output_reload; 93 94/* Indicates which hard regs are reload-registers for an output reload 95 in the current insn. 
*/ 96static HARD_REG_SET reg_is_output_reload; 97 98/* Element N is the constant value to which pseudo reg N is equivalent, 99 or zero if pseudo reg N is not equivalent to a constant. 100 find_reloads looks at this in order to replace pseudo reg N 101 with the constant it stands for. */ 102rtx *reg_equiv_constant; 103 104/* Element N is a memory location to which pseudo reg N is equivalent, 105 prior to any register elimination (such as frame pointer to stack 106 pointer). Depending on whether or not it is a valid address, this value 107 is transferred to either reg_equiv_address or reg_equiv_mem. */ 108rtx *reg_equiv_memory_loc; 109 110/* Element N is the address of stack slot to which pseudo reg N is equivalent. 111 This is used when the address is not valid as a memory address 112 (because its displacement is too big for the machine.) */ 113rtx *reg_equiv_address; 114 115/* Element N is the memory slot to which pseudo reg N is equivalent, 116 or zero if pseudo reg N is not equivalent to a memory slot. */ 117rtx *reg_equiv_mem; 118 119/* Widest width in which each pseudo reg is referred to (via subreg). */ 120static int *reg_max_ref_width; 121 122/* Element N is the list of insns that initialized reg N from its equivalent 123 constant or memory slot. */ 124static rtx *reg_equiv_init; 125 126/* Vector to remember old contents of reg_renumber before spilling. */ 127static short *reg_old_renumber; 128 129/* During reload_as_needed, element N contains the last pseudo regno reloaded 130 into hard register N. If that pseudo reg occupied more than one register, 131 reg_reloaded_contents points to that pseudo for each spill register in 132 use; all of these must remain set for an inheritance to occur. */ 133static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER]; 134 135/* During reload_as_needed, element N contains the insn for which 136 hard register N was last used. Its contents are significant only 137 when reg_reloaded_valid is set for this register. 
*/ 138static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER]; 139 140/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */ 141static HARD_REG_SET reg_reloaded_valid; 142/* Indicate if the register was dead at the end of the reload. 143 This is only valid if reg_reloaded_contents is set and valid. */ 144static HARD_REG_SET reg_reloaded_dead; 145 146/* Number of spill-regs so far; number of valid elements of spill_regs. */ 147static int n_spills; 148 149/* In parallel with spill_regs, contains REG rtx's for those regs. 150 Holds the last rtx used for any given reg, or 0 if it has never 151 been used for spilling yet. This rtx is reused, provided it has 152 the proper mode. */ 153static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER]; 154 155/* In parallel with spill_regs, contains nonzero for a spill reg 156 that was stored after the last time it was used. 157 The precise value is the insn generated to do the store. */ 158static rtx spill_reg_store[FIRST_PSEUDO_REGISTER]; 159 160/* This is the register that was stored with spill_reg_store. This is a 161 copy of reload_out / reload_out_reg when the value was stored; if 162 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */ 163static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER]; 164 165/* This table is the inverse mapping of spill_regs: 166 indexed by hard reg number, 167 it contains the position of that reg in spill_regs, 168 or -1 for something that is not in spill_regs. 169 170 ?!? This is no longer accurate. */ 171static short spill_reg_order[FIRST_PSEUDO_REGISTER]; 172 173/* This reg set indicates registers that can't be used as spill registers for 174 the currently processed insn. These are the hard registers which are live 175 during the insn, but not allocated to pseudos, as well as fixed 176 registers. */ 177static HARD_REG_SET bad_spill_regs; 178 179/* These are the hard registers that can't be used as spill register for any 180 insn. 
This includes registers used for user variables and registers that 181 we can't eliminate. A register that appears in this set also can't be used 182 to retry register allocation. */ 183static HARD_REG_SET bad_spill_regs_global; 184 185/* Describes order of use of registers for reloading 186 of spilled pseudo-registers. `n_spills' is the number of 187 elements that are actually valid; new ones are added at the end. 188 189 Both spill_regs and spill_reg_order are used on two occasions: 190 once during find_reload_regs, where they keep track of the spill registers 191 for a single insn, but also during reload_as_needed where they show all 192 the registers ever used by reload. For the latter case, the information 193 is calculated during finish_spills. */ 194static short spill_regs[FIRST_PSEUDO_REGISTER]; 195 196/* This vector of reg sets indicates, for each pseudo, which hard registers 197 may not be used for retrying global allocation because the register was 198 formerly spilled from one of them. If we allowed reallocating a pseudo to 199 a register that it was already allocated to, reload might not 200 terminate. */ 201static HARD_REG_SET *pseudo_previous_regs; 202 203/* This vector of reg sets indicates, for each pseudo, which hard 204 registers may not be used for retrying global allocation because they 205 are used as spill registers during one of the insns in which the 206 pseudo is live. */ 207static HARD_REG_SET *pseudo_forbidden_regs; 208 209/* All hard regs that have been used as spill registers for any insn are 210 marked in this set. */ 211static HARD_REG_SET used_spill_regs; 212 213/* Index of last register assigned as a spill register. We allocate in 214 a round-robin fashion. */ 215static int last_spill_reg; 216 217/* Describes order of preference for putting regs into spill_regs. 218 Contains the numbers of all the hard regs, in order most preferred first. 219 This order is different for each function. 220 It is set up by order_regs_for_reload. 
221 Empty elements at the end contain -1. */ 222static short potential_reload_regs[FIRST_PSEUDO_REGISTER]; 223 224/* Nonzero if indirect addressing is supported on the machine; this means 225 that spilling (REG n) does not require reloading it into a register in 226 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The 227 value indicates the level of indirect addressing supported, e.g., two 228 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get 229 a hard register. */ 230static char spill_indirect_levels; 231 232/* Nonzero if indirect addressing is supported when the innermost MEM is 233 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to 234 which these are valid is the same as spill_indirect_levels, above. */ 235char indirect_symref_ok; 236 237/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */ 238char double_reg_address_ok; 239 240/* Record the stack slot for each spilled hard register. */ 241static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER]; 242 243/* Width allocated so far for that stack slot. */ 244static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; 245 246/* Record which pseudos needed to be spilled. */ 247static regset spilled_pseudos; 248 249/* First uid used by insns created by reload in this function. 250 Used in find_equiv_reg. */ 251int reload_first_uid; 252 253/* Flag set by local-alloc or global-alloc if anything is live in 254 a call-clobbered reg across calls. */ 255int caller_save_needed; 256 257/* Set to 1 while reload_as_needed is operating. 258 Required by some machines to handle any generated moves differently. */ 259int reload_in_progress = 0; 260 261/* These arrays record the insn_code of insns that may be needed to 262 perform input and output reloads of special objects. They provide a 263 place to pass a scratch register. 
*/ 264enum insn_code reload_in_optab[NUM_MACHINE_MODES]; 265enum insn_code reload_out_optab[NUM_MACHINE_MODES]; 266 267/* This obstack is used for allocation of rtl during register elimination. 268 The allocated storage can be freed once find_reloads has processed the 269 insn. */ 270struct obstack reload_obstack; 271 272/* Points to the beginning of the reload_obstack. All insn_chain structures 273 are allocated first. */ 274char *reload_startobj; 275 276/* The point after all insn_chain structures. Used to quickly deallocate 277 memory used while processing one insn. */ 278char *reload_firstobj; 279 280#define obstack_chunk_alloc xmalloc 281#define obstack_chunk_free free 282 283/* List of labels that must never be deleted. */ 284extern rtx forced_labels; 285 286/* List of insn_chain instructions, one for every insn that reload needs to 287 examine. */ 288struct insn_chain *reload_insn_chain; 289 290#ifdef TREE_CODE 291extern tree current_function_decl; 292#else 293extern union tree_node *current_function_decl; 294#endif 295 296/* List of all insns needing reloads. */ 297static struct insn_chain *insns_need_reload; 298 299/* This structure is used to record information about register eliminations. 300 Each array entry describes one possible way of eliminating a register 301 in favor of another. If there is more than one way of eliminating a 302 particular register, the most preferred should be specified first. */ 303 304struct elim_table 305{ 306 int from; /* Register number to be eliminated. */ 307 int to; /* Register number used as replacement. */ 308 int initial_offset; /* Initial difference between values. */ 309 int can_eliminate; /* Non-zero if this elimination can be done. */ 310 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over 311 insns made by reload. */ 312 int offset; /* Current offset between the two regs. */ 313 int previous_offset; /* Offset at end of previous insn. 
*/ 314 int ref_outside_mem; /* "to" has been referenced outside a MEM. */ 315 rtx from_rtx; /* REG rtx for the register to be eliminated. 316 We cannot simply compare the number since 317 we might then spuriously replace a hard 318 register corresponding to a pseudo 319 assigned to the reg to be eliminated. */ 320 rtx to_rtx; /* REG rtx for the replacement. */ 321}; 322 323static struct elim_table * reg_eliminate = 0; 324 325/* This is an intermediate structure to initialize the table. It has 326 exactly the members provided by ELIMINABLE_REGS. */ 327static struct elim_table_1 328{ 329 int from; 330 int to; 331} reg_eliminate_1[] = 332 333/* If a set of eliminable registers was specified, define the table from it. 334 Otherwise, default to the normal case of the frame pointer being 335 replaced by the stack pointer. */ 336 337#ifdef ELIMINABLE_REGS 338 ELIMINABLE_REGS; 339#else 340 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}; 341#endif 342 343#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate_1/sizeof reg_eliminate_1[0]) 344 345/* Record the number of pending eliminations that have an offset not equal 346 to their initial offset. If non-zero, we use a new copy of each 347 replacement result in any insns encountered. */ 348int num_not_at_initial_offset; 349 350/* Count the number of registers that we may be able to eliminate. */ 351static int num_eliminable; 352/* And the number of registers that are equivalent to a constant that 353 can be eliminated to frame_pointer / arg_pointer + constant. */ 354static int num_eliminable_invariants; 355 356/* For each label, we record the offset of each elimination. If we reach 357 a label by more than one path and an offset differs, we cannot do the 358 elimination. This information is indexed by the number of the label. 359 The first table is an array of flags that records whether we have yet 360 encountered a label and the second table is an array of arrays, one 361 entry in the latter array for each elimination. 
*/ 362 363static char *offsets_known_at; 364static int (*offsets_at)[NUM_ELIMINABLE_REGS]; 365 366/* Number of labels in the current function. */ 367 368static int num_labels; 369 370struct hard_reg_n_uses 371{ 372 int regno; 373 unsigned int uses; 374}; 375 376static void maybe_fix_stack_asms PROTO((void)); 377static void calculate_needs_all_insns PROTO((int)); 378static void calculate_needs PROTO((struct insn_chain *)); 379static void find_reload_regs PROTO((struct insn_chain *chain, 380 FILE *)); 381static void find_tworeg_group PROTO((struct insn_chain *, int, 382 FILE *)); 383static void find_group PROTO((struct insn_chain *, int, 384 FILE *)); 385static int possible_group_p PROTO((struct insn_chain *, int)); 386static void count_possible_groups PROTO((struct insn_chain *, int)); 387static int modes_equiv_for_class_p PROTO((enum machine_mode, 388 enum machine_mode, 389 enum reg_class)); 390static void delete_caller_save_insns PROTO((void)); 391 392static void spill_failure PROTO((rtx)); 393static void new_spill_reg PROTO((struct insn_chain *, int, int, 394 int, FILE *)); 395static void maybe_mark_pseudo_spilled PROTO((int)); 396static void delete_dead_insn PROTO((rtx)); 397static void alter_reg PROTO((int, int)); 398static void set_label_offsets PROTO((rtx, rtx, int)); 399static int eliminate_regs_in_insn PROTO((rtx, int)); 400static void update_eliminable_offsets PROTO((void)); 401static void mark_not_eliminable PROTO((rtx, rtx)); 402static void set_initial_elim_offsets PROTO((void)); 403static void verify_initial_elim_offsets PROTO((void)); 404static void set_initial_label_offsets PROTO((void)); 405static void set_offsets_for_label PROTO((rtx)); 406static void init_elim_table PROTO((void)); 407static void update_eliminables PROTO((HARD_REG_SET *)); 408static void spill_hard_reg PROTO((int, FILE *, int)); 409static int finish_spills PROTO((int, FILE *)); 410static void ior_hard_reg_set PROTO((HARD_REG_SET *, HARD_REG_SET *)); 411static void 
scan_paradoxical_subregs PROTO((rtx)); 412static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR)); 413static void count_pseudo PROTO((struct hard_reg_n_uses *, int)); 414static void order_regs_for_reload PROTO((struct insn_chain *)); 415static void reload_as_needed PROTO((int)); 416static void forget_old_reloads_1 PROTO((rtx, rtx)); 417static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR)); 418static void mark_reload_reg_in_use PROTO((int, int, enum reload_type, 419 enum machine_mode)); 420static void clear_reload_reg_in_use PROTO((int, int, enum reload_type, 421 enum machine_mode)); 422static int reload_reg_free_p PROTO((int, int, enum reload_type)); 423static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int, int)); 424static int free_for_value_p PROTO((int, enum machine_mode, int, 425 enum reload_type, rtx, rtx, 426 int, int)); 427static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type)); 428static int allocate_reload_reg PROTO((struct insn_chain *, int, int, 429 int)); 430static int conflicts_with_override PROTO((rtx)); 431static void choose_reload_regs PROTO((struct insn_chain *)); 432static void merge_assigned_reloads PROTO((rtx)); 433static void emit_reload_insns PROTO((struct insn_chain *)); 434static void delete_output_reload PROTO((rtx, int, int)); 435static void delete_address_reloads PROTO((rtx, rtx)); 436static void delete_address_reloads_1 PROTO((rtx, rtx, rtx)); 437static rtx inc_for_reload PROTO((rtx, rtx, rtx, int)); 438static int constraint_accepts_reg_p PROTO((const char *, rtx)); 439static void reload_cse_regs_1 PROTO((rtx)); 440static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int)); 441static int reload_cse_mem_conflict_p PROTO((rtx, rtx)); 442static void reload_cse_invalidate_mem PROTO((rtx)); 443static void reload_cse_invalidate_rtx PROTO((rtx, rtx)); 444static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode)); 
static int reload_cse_noop_set_p	PROTO((rtx, rtx));
static int reload_cse_simplify_set	PROTO((rtx, rtx));
static int reload_cse_simplify_operands	PROTO((rtx));
static void reload_cse_check_clobber	PROTO((rtx, rtx));
static void reload_cse_record_set	PROTO((rtx, rtx));
static void reload_combine		PROTO((void));
static void reload_combine_note_use	PROTO((rtx *, rtx));
static void reload_combine_note_store	PROTO((rtx, rtx));
static void reload_cse_move2add		PROTO((rtx));
static void move2add_note_store		PROTO((rtx, rtx));
#ifdef AUTO_INC_DEC
static void add_auto_inc_notes		PROTO((rtx, rtx));
#endif

/* Initialize the reload pass once per compilation.

   Probes the target's addressing capabilities by constructing sample
   addresses and asking memory_address_p about them, recording the
   results in the file-scope flags spill_indirect_levels,
   indirect_symref_ok and double_reg_address_ok.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  /* Build (MEM (PLUS (REG pseudo) (CONST_INT 4))) as the innermost probe
     address.  A regno past LAST_VIRTUAL_REGISTER stands in for an
     arbitrary pseudo.  */
  register rtx tem
    = gen_rtx_MEM (Pmode,
		   gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
				 GEN_INT (4)));
  spill_indirect_levels = 0;

  /* Keep wrapping the probe in another MEM until the target rejects it;
     each accepted wrap is one more supported level of indirection.
     NOTE(review): QImode is used as the probe mode throughout — presumably
     the least restrictive mode; confirm for targets whose legitimate
     addresses depend on the access mode.  */
  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
			  gen_rtx_REG (Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	{
	  double_reg_address_ok = 1;
	  break;
	}
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
}

/* List of insn chains that are currently unused.  */
static struct insn_chain *unused_insn_chains = 0;

/* Allocate an empty insn_chain structure.

   Chains are recycled through unused_insn_chains; a recycled chain keeps
   the live_before/live_after regsets it was originally allocated with
   (they are only created on a fresh obstack allocation here).  The
   need_* flags are reset on every call, but the regsets' contents are
   not cleared — callers are expected to overwrite them.  */
struct insn_chain *
new_insn_chain ()
{
  struct insn_chain *c;

  if (unused_insn_chains == 0)
    {
      c = (struct insn_chain *)
	obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
      c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
      c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
    }
  else
    {
      c = unused_insn_chains;
      unused_insn_chains = c->next;
    }
  c->is_caller_save_insn = 0;
  c->need_operand_change = 0;
  c->need_reload = 0;
  c->need_elim = 0;
  return c;
}

/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.

   For each pseudo in FROM that has a hard reg (reg_renumber >= 0), every
   hard register it occupies (per HARD_REGNO_NREGS for its mode) is added
   to TO.  Pseudos without a hard reg are only tolerated after reload has
   completed; before that, an unallocated pseudo in FROM indicates an
   internal inconsistency and we abort.  */
void
compute_use_by_pseudos (to, from)
     HARD_REG_SET *to;
     regset from;
{
  int regno;
  EXECUTE_IF_SET_IN_REG_SET
    (from, FIRST_PSEUDO_REGISTER, regno,
     {
       int r = reg_renumber[regno];
       int nregs;
       if (r < 0)
	 {
	   /* reload_combine uses the information from
	      BASIC_BLOCK->global_live_at_start, which might still
	      contain registers that have not actually been allocated
	      since they have an equivalence.  */
	   if (! reload_completed)
	     abort ();
	 }
       else
	 {
	   nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
	   while (nregs-- > 0)
	     SET_HARD_REG_BIT (*to, r + nregs);
	 }
     });
}

/* Global variables used by reload and its subroutines.  */

/* Set during calculate_needs if an insn needs register elimination.  */
static int something_needs_elimination;
/* Set during calculate_needs if an insn needs an operand changed.  */
int something_needs_operands_changed;

/* Nonzero means we couldn't get enough spill regs.
*/ 577static int failure; 578 579/* Main entry point for the reload pass. 580 581 FIRST is the first insn of the function being compiled. 582 583 GLOBAL nonzero means we were called from global_alloc 584 and should attempt to reallocate any pseudoregs that we 585 displace from hard regs we will use for reloads. 586 If GLOBAL is zero, we do not have enough information to do that, 587 so any pseudo reg that is spilled must go to the stack. 588 589 DUMPFILE is the global-reg debugging dump file stream, or 0. 590 If it is nonzero, messages are written to it to describe 591 which registers are seized as reload regs, which pseudo regs 592 are spilled from them, and where the pseudo regs are reallocated to. 593 594 Return value is nonzero if reload failed 595 and we must not do any more for this function. */ 596 597int 598reload (first, global, dumpfile) 599 rtx first; 600 int global; 601 FILE *dumpfile; 602{ 603 register int i; 604 register rtx insn; 605 register struct elim_table *ep; 606 607 /* The two pointers used to track the true location of the memory used 608 for label offsets. */ 609 char *real_known_ptr = NULL_PTR; 610 int (*real_at_ptr)[NUM_ELIMINABLE_REGS]; 611 612 /* Make sure even insns with volatile mem refs are recognizable. */ 613 init_recog (); 614 615 failure = 0; 616 617 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0); 618 619 /* Make sure that the last insn in the chain 620 is not something that needs reloading. */ 621 emit_note (NULL_PTR, NOTE_INSN_DELETED); 622 623 /* Enable find_equiv_reg to distinguish insns made by reload. */ 624 reload_first_uid = get_max_uid (); 625 626#ifdef SECONDARY_MEMORY_NEEDED 627 /* Initialize the secondary memory table. */ 628 clear_secondary_mem (); 629#endif 630 631 /* We don't have a stack slot for any spill reg yet. 
*/ 632 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot); 633 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width); 634 635 /* Initialize the save area information for caller-save, in case some 636 are needed. */ 637 init_save_areas (); 638 639 /* Compute which hard registers are now in use 640 as homes for pseudo registers. 641 This is done here rather than (eg) in global_alloc 642 because this point is reached even if not optimizing. */ 643 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 644 mark_home_live (i); 645 646 /* A function that receives a nonlocal goto must save all call-saved 647 registers. */ 648 if (current_function_has_nonlocal_label) 649 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 650 { 651 if (! call_used_regs[i] && ! fixed_regs[i]) 652 regs_ever_live[i] = 1; 653 } 654 655 /* Find all the pseudo registers that didn't get hard regs 656 but do have known equivalent constants or memory slots. 657 These include parameters (known equivalent to parameter slots) 658 and cse'd or loop-moved constant memory addresses. 659 660 Record constant equivalents in reg_equiv_constant 661 so they will be substituted by find_reloads. 662 Record memory equivalents in reg_mem_equiv so they can 663 be substituted eventually by altering the REG-rtx's. 
*/ 664 665 reg_equiv_constant = (rtx *) xmalloc (max_regno * sizeof (rtx)); 666 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx)); 667 reg_equiv_memory_loc = (rtx *) xmalloc (max_regno * sizeof (rtx)); 668 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx)); 669 reg_equiv_mem = (rtx *) xmalloc (max_regno * sizeof (rtx)); 670 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx)); 671 reg_equiv_init = (rtx *) xmalloc (max_regno * sizeof (rtx)); 672 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx)); 673 reg_equiv_address = (rtx *) xmalloc (max_regno * sizeof (rtx)); 674 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx)); 675 reg_max_ref_width = (int *) xmalloc (max_regno * sizeof (int)); 676 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int)); 677 reg_old_renumber = (short *) xmalloc (max_regno * sizeof (short)); 678 bcopy ((PTR) reg_renumber, (PTR) reg_old_renumber, max_regno * sizeof (short)); 679 pseudo_forbidden_regs 680 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET)); 681 pseudo_previous_regs 682 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET)); 683 684 CLEAR_HARD_REG_SET (bad_spill_regs_global); 685 bzero ((char *) pseudo_previous_regs, max_regno * sizeof (HARD_REG_SET)); 686 687 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to. 688 Also find all paradoxical subregs and find largest such for each pseudo. 689 On machines with small register classes, record hard registers that 690 are used for user variables. These can never be used for spills. 691 Also look for a "constant" NOTE_INSN_SETJMP. This means that all 692 caller-saved registers must be marked live. */ 693 694 num_eliminable_invariants = 0; 695 for (insn = first; insn; insn = NEXT_INSN (insn)) 696 { 697 rtx set = single_set (insn); 698 699 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn) 700 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP) 701 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 702 if (! 
call_used_regs[i]) 703 regs_ever_live[i] = 1; 704 705 if (set != 0 && GET_CODE (SET_DEST (set)) == REG) 706 { 707 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX); 708 if (note 709#ifdef LEGITIMATE_PIC_OPERAND_P 710 && (! function_invariant_p (XEXP (note, 0)) 711 || ! flag_pic 712 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0))) 713#endif 714 ) 715 { 716 rtx x = XEXP (note, 0); 717 i = REGNO (SET_DEST (set)); 718 if (i > LAST_VIRTUAL_REGISTER) 719 { 720 if (GET_CODE (x) == MEM) 721 { 722 /* If the operand is a PLUS, the MEM may be shared, 723 so make sure we have an unshared copy here. */ 724 if (GET_CODE (XEXP (x, 0)) == PLUS) 725 x = copy_rtx (x); 726 727 reg_equiv_memory_loc[i] = x; 728 } 729 else if (function_invariant_p (x)) 730 { 731 if (GET_CODE (x) == PLUS) 732 { 733 /* This is PLUS of frame pointer and a constant, 734 and might be shared. Unshare it. */ 735 reg_equiv_constant[i] = copy_rtx (x); 736 num_eliminable_invariants++; 737 } 738 else if (x == frame_pointer_rtx 739 || x == arg_pointer_rtx) 740 { 741 reg_equiv_constant[i] = x; 742 num_eliminable_invariants++; 743 } 744 else if (LEGITIMATE_CONSTANT_P (x)) 745 reg_equiv_constant[i] = x; 746 else 747 reg_equiv_memory_loc[i] 748 = force_const_mem (GET_MODE (SET_DEST (set)), x); 749 } 750 else 751 continue; 752 753 /* If this register is being made equivalent to a MEM 754 and the MEM is not SET_SRC, the equivalencing insn 755 is one with the MEM as a SET_DEST and it occurs later. 756 So don't mark this insn now. */ 757 if (GET_CODE (x) != MEM 758 || rtx_equal_p (SET_SRC (set), x)) 759 reg_equiv_init[i] 760 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]); 761 } 762 } 763 } 764 765 /* If this insn is setting a MEM from a register equivalent to it, 766 this is the equivalencing insn. 
*/ 767 else if (set && GET_CODE (SET_DEST (set)) == MEM 768 && GET_CODE (SET_SRC (set)) == REG 769 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] 770 && rtx_equal_p (SET_DEST (set), 771 reg_equiv_memory_loc[REGNO (SET_SRC (set))])) 772 reg_equiv_init[REGNO (SET_SRC (set))] 773 = gen_rtx_INSN_LIST (VOIDmode, insn, 774 reg_equiv_init[REGNO (SET_SRC (set))]); 775 776 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') 777 scan_paradoxical_subregs (PATTERN (insn)); 778 } 779 780 init_elim_table (); 781 782 num_labels = max_label_num () - get_first_label_num (); 783 784 /* Allocate the tables used to store offset information at labels. */ 785 /* We used to use alloca here, but the size of what it would try to 786 allocate would occasionally cause it to exceed the stack limit and 787 cause a core dump. */ 788 real_known_ptr = xmalloc (num_labels); 789 real_at_ptr 790 = (int (*)[NUM_ELIMINABLE_REGS]) 791 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int)); 792 793 offsets_known_at = real_known_ptr - get_first_label_num (); 794 offsets_at 795 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ()); 796 797 /* Alter each pseudo-reg rtx to contain its hard reg number. 798 Assign stack slots to the pseudos that lack hard regs or equivalents. 799 Do not touch virtual registers. */ 800 801 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++) 802 alter_reg (i, -1); 803 804 /* If we have some registers we think can be eliminated, scan all insns to 805 see if there is an insn that sets one of these registers to something 806 other than itself plus a constant. If so, the register cannot be 807 eliminated. Doing this scan here eliminates an extra pass through the 808 main reload loop in the most common case where register elimination 809 cannot be done. 
*/ 810 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn)) 811 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN 812 || GET_CODE (insn) == CALL_INSN) 813 note_stores (PATTERN (insn), mark_not_eliminable); 814 815#ifndef REGISTER_CONSTRAINTS 816 /* If all the pseudo regs have hard regs, 817 except for those that are never referenced, 818 we know that no reloads are needed. */ 819 /* But that is not true if there are register constraints, since 820 in that case some pseudos might be in the wrong kind of hard reg. */ 821 822 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 823 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0) 824 break; 825 826 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed) 827 { 828 free (real_known_ptr); 829 free (real_at_ptr); 830 free (reg_equiv_constant); 831 free (reg_equiv_memory_loc); 832 free (reg_equiv_mem); 833 free (reg_equiv_init); 834 free (reg_equiv_address); 835 free (reg_max_ref_width); 836 free (reg_old_renumber); 837 free (pseudo_previous_regs); 838 free (pseudo_forbidden_regs); 839 return 0; 840 } 841#endif 842 843 maybe_fix_stack_asms (); 844 845 insns_need_reload = 0; 846 something_needs_elimination = 0; 847 848 /* Initialize to -1, which means take the first spill register. */ 849 last_spill_reg = -1; 850 851 spilled_pseudos = ALLOCA_REG_SET (); 852 853 /* Spill any hard regs that we know we can't eliminate. */ 854 CLEAR_HARD_REG_SET (used_spill_regs); 855 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) 856 if (! ep->can_eliminate) 857 spill_hard_reg (ep->from, dumpfile, 1); 858 859#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM 860 if (frame_pointer_needed) 861 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1); 862#endif 863 finish_spills (global, dumpfile); 864 865 /* From now on, we may need to generate moves differently. We may also 866 allow modifications of insns which cause them to not be recognized. 
867 Any such modifications will be cleaned up during reload itself. */ 868 reload_in_progress = 1; 869 870 /* This loop scans the entire function each go-round 871 and repeats until one repetition spills no additional hard regs. */ 872 for (;;) 873 { 874 int something_changed; 875 int did_spill; 876 struct insn_chain *chain; 877 878 HOST_WIDE_INT starting_frame_size; 879 880 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done 881 here because the stack size may be a part of the offset computation 882 for register elimination, and there might have been new stack slots 883 created in the last iteration of this loop. */ 884 assign_stack_local (BLKmode, 0, 0); 885 886 starting_frame_size = get_frame_size (); 887 888 set_initial_elim_offsets (); 889 set_initial_label_offsets (); 890 891 /* For each pseudo register that has an equivalent location defined, 892 try to eliminate any eliminable registers (such as the frame pointer) 893 assuming initial offsets for the replacement register, which 894 is the normal case. 895 896 If the resulting location is directly addressable, substitute 897 the MEM we just got directly for the old REG. 898 899 If it is not addressable but is a constant or the sum of a hard reg 900 and constant, it is probably not addressable because the constant is 901 out of range, in that case record the address; we will generate 902 hairy code to compute the address in a register each time it is 903 needed. Similarly if it is a hard register, but one that is not 904 valid as an address register. 905 906 If the location is not addressable, but does not have one of the 907 above forms, assign a stack slot. We have to do this to avoid the 908 potential of producing lots of reloads if, e.g., a location involves 909 a pseudo that didn't get a hard register and has an equivalent memory 910 location that also involves a pseudo that didn't get a hard register. 
911 912 Perhaps at some point we will improve reload_when_needed handling 913 so this problem goes away. But that's very hairy. */ 914 915 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 916 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i]) 917 { 918 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX); 919 920 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]), 921 XEXP (x, 0))) 922 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0; 923 else if (CONSTANT_P (XEXP (x, 0)) 924 || (GET_CODE (XEXP (x, 0)) == REG 925 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) 926 || (GET_CODE (XEXP (x, 0)) == PLUS 927 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG 928 && (REGNO (XEXP (XEXP (x, 0), 0)) 929 < FIRST_PSEUDO_REGISTER) 930 && CONSTANT_P (XEXP (XEXP (x, 0), 1)))) 931 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0; 932 else 933 { 934 /* Make a new stack slot. Then indicate that something 935 changed so we go back and recompute offsets for 936 eliminable registers because the allocation of memory 937 below might change some offset. reg_equiv_{mem,address} 938 will be set up for this pseudo on the next pass around 939 the loop. */ 940 reg_equiv_memory_loc[i] = 0; 941 reg_equiv_init[i] = 0; 942 alter_reg (i, -1); 943 } 944 } 945 946 if (caller_save_needed) 947 setup_save_areas (); 948 949 /* If we allocated another stack slot, redo elimination bookkeeping. */ 950 if (starting_frame_size != get_frame_size ()) 951 continue; 952 953 if (caller_save_needed) 954 { 955 save_call_clobbered_regs (); 956 /* That might have allocated new insn_chain structures. */ 957 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0); 958 } 959 960 calculate_needs_all_insns (global); 961 962 CLEAR_REG_SET (spilled_pseudos); 963 did_spill = 0; 964 965 something_changed = 0; 966 967 /* If we allocated any new memory locations, make another pass 968 since it might have changed elimination offsets. 
*/ 969 if (starting_frame_size != get_frame_size ()) 970 something_changed = 1; 971 972 { 973 HARD_REG_SET to_spill; 974 CLEAR_HARD_REG_SET (to_spill); 975 update_eliminables (&to_spill); 976 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 977 if (TEST_HARD_REG_BIT (to_spill, i)) 978 { 979 spill_hard_reg (i, dumpfile, 1); 980 did_spill = 1; 981 982 /* Regardless of the state of spills, if we previously had 983 a register that we thought we could eliminate, but no can 984 not eliminate, we must run another pass. 985 986 Consider pseudos which have an entry in reg_equiv_* which 987 reference an eliminable register. We must make another pass 988 to update reg_equiv_* so that we do not substitute in the 989 old value from when we thought the elimination could be 990 performed. */ 991 something_changed = 1; 992 } 993 } 994 995 CLEAR_HARD_REG_SET (used_spill_regs); 996 /* Try to satisfy the needs for each insn. */ 997 for (chain = insns_need_reload; chain != 0; 998 chain = chain->next_need_reload) 999 find_reload_regs (chain, dumpfile); 1000 1001 if (failure) 1002 goto failed; 1003 1004 if (insns_need_reload != 0 || did_spill) 1005 something_changed |= finish_spills (global, dumpfile); 1006 1007 if (! something_changed) 1008 break; 1009 1010 if (caller_save_needed) 1011 delete_caller_save_insns (); 1012 } 1013 1014 /* If global-alloc was run, notify it of any register eliminations we have 1015 done. */ 1016 if (global) 1017 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) 1018 if (ep->can_eliminate) 1019 mark_elimination (ep->from, ep->to); 1020 1021 /* If a pseudo has no hard reg, delete the insns that made the equivalence. 1022 If that insn didn't set the register (i.e., it copied the register to 1023 memory), just delete that insn instead of the equivalencing insn plus 1024 anything now dead. If we call delete_dead_insn on that insn, we may 1025 delete the insn that actually sets the register if the register dies 1026 there and that is incorrect. 
*/ 1027 1028 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 1029 { 1030 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0) 1031 { 1032 rtx list; 1033 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1)) 1034 { 1035 rtx equiv_insn = XEXP (list, 0); 1036 if (GET_CODE (equiv_insn) == NOTE) 1037 continue; 1038 if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn))) 1039 delete_dead_insn (equiv_insn); 1040 else 1041 { 1042 PUT_CODE (equiv_insn, NOTE); 1043 NOTE_SOURCE_FILE (equiv_insn) = 0; 1044 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED; 1045 } 1046 } 1047 } 1048 } 1049 1050 /* Use the reload registers where necessary 1051 by generating move instructions to move the must-be-register 1052 values into or out of the reload registers. */ 1053 1054 if (insns_need_reload != 0 || something_needs_elimination 1055 || something_needs_operands_changed) 1056 { 1057 int old_frame_size = get_frame_size (); 1058 1059 reload_as_needed (global); 1060 1061 if (old_frame_size != get_frame_size ()) 1062 abort (); 1063 1064 if (num_eliminable) 1065 verify_initial_elim_offsets (); 1066 } 1067 1068 /* If we were able to eliminate the frame pointer, show that it is no 1069 longer live at the start of any basic block. If it ls live by 1070 virtue of being in a pseudo, that pseudo will be marked live 1071 and hence the frame pointer will be known to be live via that 1072 pseudo. */ 1073 1074 if (! frame_pointer_needed) 1075 for (i = 0; i < n_basic_blocks; i++) 1076 CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start, 1077 HARD_FRAME_POINTER_REGNUM); 1078 1079 /* Come here (with failure set nonzero) if we can't get enough spill regs 1080 and we decide not to abort about it. */ 1081 failed: 1082 1083 reload_in_progress = 0; 1084 1085 /* Now eliminate all pseudo regs by modifying them into 1086 their equivalent memory references. 1087 The REG-rtx's for the pseudos are modified in place, 1088 so all insns that used to refer to them now refer to memory. 
1089 1090 For a reg that has a reg_equiv_address, all those insns 1091 were changed by reloading so that no insns refer to it any longer; 1092 but the DECL_RTL of a variable decl may refer to it, 1093 and if so this causes the debugging info to mention the variable. */ 1094 1095 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 1096 { 1097 rtx addr = 0; 1098 int in_struct = 0; 1099 int is_scalar; 1100 int is_readonly = 0; 1101 1102 if (reg_equiv_memory_loc[i]) 1103 { 1104 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]); 1105 is_scalar = MEM_SCALAR_P (reg_equiv_memory_loc[i]); 1106 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]); 1107 } 1108 1109 if (reg_equiv_mem[i]) 1110 addr = XEXP (reg_equiv_mem[i], 0); 1111 1112 if (reg_equiv_address[i]) 1113 addr = reg_equiv_address[i]; 1114 1115 if (addr) 1116 { 1117 if (reg_renumber[i] < 0) 1118 { 1119 rtx reg = regno_reg_rtx[i]; 1120 XEXP (reg, 0) = addr; 1121 REG_USERVAR_P (reg) = 0; 1122 RTX_UNCHANGING_P (reg) = is_readonly; 1123 MEM_IN_STRUCT_P (reg) = in_struct; 1124 MEM_SCALAR_P (reg) = is_scalar; 1125 /* We have no alias information about this newly created 1126 MEM. */ 1127 MEM_ALIAS_SET (reg) = 0; 1128 PUT_CODE (reg, MEM); 1129 } 1130 else if (reg_equiv_mem[i]) 1131 XEXP (reg_equiv_mem[i], 0) = addr; 1132 } 1133 } 1134 1135 /* We must set reload_completed now since the cleanup_subreg_operands call 1136 below will re-recognize each insn and reload may have generated insns 1137 which are only valid during and after reload. */ 1138 reload_completed = 1; 1139 1140 /* Make a pass over all the insns and delete all USEs which we 1141 inserted only to tag a REG_EQUAL note on them. Remove all 1142 REG_DEAD and REG_UNUSED notes. Delete all CLOBBER insns and 1143 simplify (subreg (reg)) operands. Also remove all REG_RETVAL and 1144 REG_LIBCALL notes since they are no longer useful or accurate. 1145 Strip and regenerate REG_INC notes that may have been moved 1146 around. 
*/ 1147 1148 for (insn = first; insn; insn = NEXT_INSN (insn)) 1149 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') 1150 { 1151 rtx *pnote; 1152 1153 if ((GET_CODE (PATTERN (insn)) == USE 1154 && find_reg_note (insn, REG_EQUAL, NULL_RTX)) 1155 || GET_CODE (PATTERN (insn)) == CLOBBER) 1156 { 1157 PUT_CODE (insn, NOTE); 1158 NOTE_SOURCE_FILE (insn) = 0; 1159 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; 1160 continue; 1161 } 1162 1163 pnote = ®_NOTES (insn); 1164 while (*pnote != 0) 1165 { 1166 if (REG_NOTE_KIND (*pnote) == REG_DEAD 1167 || REG_NOTE_KIND (*pnote) == REG_UNUSED 1168 || REG_NOTE_KIND (*pnote) == REG_INC 1169 || REG_NOTE_KIND (*pnote) == REG_RETVAL 1170 || REG_NOTE_KIND (*pnote) == REG_LIBCALL) 1171 *pnote = XEXP (*pnote, 1); 1172 else 1173 pnote = &XEXP (*pnote, 1); 1174 } 1175 1176#ifdef AUTO_INC_DEC 1177 add_auto_inc_notes (insn, PATTERN (insn)); 1178#endif 1179 1180 /* And simplify (subreg (reg)) if it appears as an operand. */ 1181 cleanup_subreg_operands (insn); 1182 } 1183 1184 /* If we are doing stack checking, give a warning if this function's 1185 frame size is larger than we expect. */ 1186 if (flag_stack_check && ! STACK_CHECK_BUILTIN) 1187 { 1188 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE; 1189 static int verbose_warned = 0; 1190 1191 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1192 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i]) 1193 size += UNITS_PER_WORD; 1194 1195 if (size > STACK_CHECK_MAX_FRAME_SIZE) 1196 { 1197 warning ("frame size too large for reliable stack checking"); 1198 if (! verbose_warned) 1199 { 1200 warning ("try reducing the number of local variables"); 1201 verbose_warned = 1; 1202 } 1203 } 1204 } 1205 1206 /* Indicate that we no longer have known memory locations or constants. 
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms ()
{
#ifdef STACK_REGS
  /* Per-operand constraint strings and modes, filled in by
     decode_asm_operands below.  */
  char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  /* Scan every insn in the function, looking for asms with operands.  */
  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Skip anything that is not an asm with operands.  */
      if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
          || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
        continue;
      pat = PATTERN (chain->insn);
      /* Explicit CLOBBERs only appear inside a PARALLEL.  */
      if (GET_CODE (pat) != PARALLEL)
        continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx t = XVECEXP (pat, 0, i);
          if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
            SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
        }

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_operand, recog_operand_loc,
                           constraints, operand_mode);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
        {
          char *p = constraints[i];
          /* For every alternative, we compute the class of registers allowed
             for reloading in CLS, and merge its contents into the reg set
             ALLOWED.  */
          int cls = (int) NO_REGS;

          for (;;)
            {
              char c = *p++;

              if (c == '\0' || c == ',' || c == '#')
                {
                  /* End of one alternative - mark the regs in the current
                     class, and reset the class.  */
                  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
                  cls = NO_REGS;
                  if (c == '#')
                    /* '#' disables the rest of this alternative; skip to
                       the next ',' or the end of the string.  */
                    do {
                      c = *p++;
                    } while (c != '\0' && c != ',');
                  if (c == '\0')
                    break;
                  continue;
                }

              switch (c)
                {
                  /* These constraint letters do not allow any register,
                     so they contribute nothing to ALLOWED.  */
                case '=': case '+': case '*': case '%': case '?': case '!':
                case '0': case '1': case '2': case '3': case '4': case 'm':
                case '<': case '>': case 'V': case 'o': case '&': case 'E':
                case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
                case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
                case 'P':
#ifdef EXTRA_CONSTRAINT
                case 'Q': case 'R': case 'S': case 'T': case 'U':
#endif
                  break;

                case 'p':
                  /* Address operand: anything usable as a base register.  */
                  cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
                  break;

                case 'g':
                case 'r':
                  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
                  break;

                default:
                  /* Machine-specific register-class constraint letter.  */
                  cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];

                }
            }
        }
      /* Those of the registers which are clobbered, but allowed by the
         constraints, must be usable as reload registers.  So clear them
         out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (allowed, i))
          {
            CLEAR_REGNO_REG_SET (chain->live_before, i);
            CLEAR_REGNO_REG_SET (chain->live_after, i);
          }
    }

#endif
}
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.
   GLOBAL is nonzero when called after global register allocation; it is
   passed through to find_reloads.  */
static void
calculate_needs_all_insns (global)
     int global;
{
  /* Tail pointer used to append to insns_need_reload in program order.  */
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain **pchain;

  something_needs_elimination = 0;

  for (pchain = &reload_insn_chain; *pchain != 0; pchain = &(*pchain)->next)
    {
      rtx insn;
      struct insn_chain *chain;

      chain = *pchain;
      insn = chain->insn;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
         include REG_LABEL), we need to see what effects this has on the
         known offsets at labels.  */

      if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
          || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
              && REG_NOTES (insn) != 0))
        set_label_offsets (insn, insn, 0);

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* Save the original body, code and notes so that any changes
             made by eliminate_regs_in_insn can be undone below.  */
          rtx old_body = PATTERN (insn);
          int old_code = INSN_CODE (insn);
          rtx old_notes = REG_NOTES (insn);
          int did_elimination = 0;
          int operands_changed = 0;
          rtx set = single_set (insn);

          /* Skip insns that only set an equivalence.  */
          if (set && GET_CODE (SET_DEST (set)) == REG
              && reg_renumber[REGNO (SET_DEST (set))] < 0
              && reg_equiv_constant[REGNO (SET_DEST (set))])
            {
              /* Must clear out the shortcuts, in case they were set last
                 time through.  */
              chain->need_elim = 0;
              chain->need_reload = 0;
              chain->need_operand_change = 0;
              continue;
            }

          /* If needed, eliminate any eliminable registers.  */
          if (num_eliminable || num_eliminable_invariants)
            did_elimination = eliminate_regs_in_insn (insn, 0);

          /* Analyze the instruction.  */
          operands_changed = find_reloads (insn, 0, spill_indirect_levels,
                                           global, spill_reg_order);

          /* If a no-op set needs more than one reload, this is likely
             to be something that needs input address reloads.  We
             can't get rid of this cleanly later, and it is of no use
             anyway, so discard it now.
             We only do this when expensive_optimizations is enabled,
             since this complements reload inheritance / output
             reload deletion, and it can make debugging harder.  */
          if (flag_expensive_optimizations && n_reloads > 1)
            {
              rtx set = single_set (insn);
              if (set
                  && SET_SRC (set) == SET_DEST (set)
                  && GET_CODE (SET_SRC (set)) == REG
                  && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
                {
                  /* Turn the no-op move into a deleted-insn note.  */
                  PUT_CODE (insn, NOTE);
                  NOTE_SOURCE_FILE (insn) = 0;
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                  continue;
                }
            }
          if (num_eliminable)
            update_eliminable_offsets ();

          /* Remember for later shortcuts which insns had any reloads or
             register eliminations.  */
          chain->need_elim = did_elimination;
          chain->need_reload = n_reloads > 0;
          chain->need_operand_change = operands_changed;

          /* Discard any register replacements done.  */
          if (did_elimination)
            {
              obstack_free (&reload_obstack, reload_firstobj);
              PATTERN (insn) = old_body;
              INSN_CODE (insn) = old_code;
              REG_NOTES (insn) = old_notes;
              something_needs_elimination = 1;
            }

          something_needs_operands_changed |= operands_changed;

          if (n_reloads != 0)
            {
              /* Append this insn to the insns_need_reload list and
                 record its register needs.  */
              *pprev_reload = chain;
              pprev_reload = &chain->next_need_reload;

              calculate_needs (chain);
            }
        }
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
/* Compute the most additional registers needed by one instruction,
   given by CHAIN.  Collect information separately for each class of regs.

   To compute the number of reload registers of each class needed for an
   insn, we must simulate what choose_reload_regs can do.  We do this by
   splitting an insn into an "input" and an "output" part.  RELOAD_OTHER
   reloads are used in both.  The input part uses those reloads,
   RELOAD_FOR_INPUT reloads, which must be live over the entire input section
   of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
   RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.

   The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
   which are live for the entire output portion, and the maximum of all the
   RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.

   The total number of registers needed is the maximum of the
   inputs and outputs.

   On return, CHAIN->need, CHAIN->group_size and CHAIN->group_mode have
   been filled in.  */

static void
calculate_needs (chain)
     struct insn_chain *chain;
{
  int i;

  /* Each `struct needs' corresponds to one RELOAD_... type.  */
  struct {
    struct needs other;
    struct needs input;
    struct needs output;
    struct needs insn;
    struct needs other_addr;
    struct needs op_addr;
    struct needs op_addr_reload;
    struct needs in_addr[MAX_RECOG_OPERANDS];
    struct needs in_addr_addr[MAX_RECOG_OPERANDS];
    struct needs out_addr[MAX_RECOG_OPERANDS];
    struct needs out_addr_addr[MAX_RECOG_OPERANDS];
  } insn_needs;

  /* Start from a clean slate for this insn.  */
  bzero ((char *) chain->group_size, sizeof chain->group_size);
  for (i = 0; i < N_REG_CLASSES; i++)
    chain->group_mode[i] = VOIDmode;
  bzero ((char *) &insn_needs, sizeof insn_needs);

  /* Count each reload once in every class
     containing the reload's own class.  */

  for (i = 0; i < n_reloads; i++)
    {
      register enum reg_class *p;
      enum reg_class class = reload_reg_class[i];
      int size;
      enum machine_mode mode;
      struct needs *this_needs;

      /* Don't count the dummy reloads, for which one of the
         regs mentioned in the insn can be used for reloading.
         Don't count optional reloads.
         Don't count reloads that got combined with others.  */
      if (reload_reg_rtx[i] != 0
          || reload_optional[i] != 0
          || (reload_out[i] == 0 && reload_in[i] == 0
              && ! reload_secondary_p[i]))
        continue;

      /* Size the reload by the wider of its input and output modes.  */
      mode = reload_inmode[i];
      if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
        mode = reload_outmode[i];
      size = CLASS_MAX_NREGS (class, mode);

      /* Decide which time-of-use to count this reload for.  */
      switch (reload_when_needed[i])
        {
        case RELOAD_OTHER:
          this_needs = &insn_needs.other;
          break;
        case RELOAD_FOR_INPUT:
          this_needs = &insn_needs.input;
          break;
        case RELOAD_FOR_OUTPUT:
          this_needs = &insn_needs.output;
          break;
        case RELOAD_FOR_INSN:
          this_needs = &insn_needs.insn;
          break;
        case RELOAD_FOR_OTHER_ADDRESS:
          this_needs = &insn_needs.other_addr;
          break;
        case RELOAD_FOR_INPUT_ADDRESS:
          this_needs = &insn_needs.in_addr[reload_opnum[i]];
          break;
        case RELOAD_FOR_INPADDR_ADDRESS:
          this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
          break;
        case RELOAD_FOR_OUTPUT_ADDRESS:
          this_needs = &insn_needs.out_addr[reload_opnum[i]];
          break;
        case RELOAD_FOR_OUTADDR_ADDRESS:
          this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
          break;
        case RELOAD_FOR_OPERAND_ADDRESS:
          this_needs = &insn_needs.op_addr;
          break;
        case RELOAD_FOR_OPADDR_ADDR:
          this_needs = &insn_needs.op_addr_reload;
          break;
        default:
          abort();
        }

      if (size > 1)
        {
          enum machine_mode other_mode, allocate_mode;

          /* Count number of groups needed separately from
             number of individual regs needed.  */
          this_needs->groups[(int) class]++;
          p = reg_class_superclasses[(int) class];
          while (*p != LIM_REG_CLASSES)
            this_needs->groups[(int) *p++]++;

          /* Record size and mode of a group of this class.  */
          /* If more than one size group is needed,
             make all groups the largest needed size.  */
          if (chain->group_size[(int) class] < size)
            {
              other_mode = chain->group_mode[(int) class];
              allocate_mode = mode;

              chain->group_size[(int) class] = size;
              chain->group_mode[(int) class] = mode;
            }
          else
            {
              other_mode = mode;
              allocate_mode = chain->group_mode[(int) class];
            }

          /* Crash if two dissimilar machine modes both need
             groups of consecutive regs of the same class.  */

          if (other_mode != VOIDmode && other_mode != allocate_mode
              && ! modes_equiv_for_class_p (allocate_mode,
                                            other_mode, class))
            fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
                        chain->insn);
        }
      else if (size == 1)
        {
          /* Single-register reload; index 0 is normal regs, index 1 is
             regs that must not be part of a group (nongroup).  */
          this_needs->regs[(unsigned char)reload_nongroup[i]][(int) class] += 1;
          p = reg_class_superclasses[(int) class];
          while (*p != LIM_REG_CLASSES)
            this_needs->regs[(unsigned char)reload_nongroup[i]][(int) *p++] += 1;
        }
      else
        abort ();
    }

  /* All reloads have been counted for this insn;
     now merge the various times of use.
     This sets insn_needs, etc., to the maximum total number
     of registers needed at any point in this insn.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    {
      int j, in_max, out_max;

      /* Compute normal and nongroup needs.  */
      for (j = 0; j <= 1; j++)
        {
          int k;
          /* Address reloads for distinct operands don't overlap, so only
             the largest per-operand requirement matters.  */
          for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
            {
              in_max = MAX (in_max,
                            (insn_needs.in_addr[k].regs[j][i]
                             + insn_needs.in_addr_addr[k].regs[j][i]));
              out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
              out_max = MAX (out_max,
                             insn_needs.out_addr_addr[k].regs[j][i]);
            }

          /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
             and operand addresses but not things used to reload
             them.  Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
             don't conflict with things needed to reload inputs or
             outputs.  */

          in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
                             insn_needs.op_addr_reload.regs[j][i]),
                        in_max);

          out_max = MAX (out_max, insn_needs.insn.regs[j][i]);

          insn_needs.input.regs[j][i]
            = MAX (insn_needs.input.regs[j][i]
                   + insn_needs.op_addr.regs[j][i]
                   + insn_needs.insn.regs[j][i],
                   in_max + insn_needs.input.regs[j][i]);

          insn_needs.output.regs[j][i] += out_max;
          insn_needs.other.regs[j][i]
            += MAX (MAX (insn_needs.input.regs[j][i],
                         insn_needs.output.regs[j][i]),
                    insn_needs.other_addr.regs[j][i]);

        }

      /* Now compute group needs.  */
      for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
        {
          in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
          in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
          out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
          out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
        }

      in_max = MAX (MAX (insn_needs.op_addr.groups[i],
                         insn_needs.op_addr_reload.groups[i]),
                    in_max);
      out_max = MAX (out_max, insn_needs.insn.groups[i]);

      insn_needs.input.groups[i]
        = MAX (insn_needs.input.groups[i]
               + insn_needs.op_addr.groups[i]
               + insn_needs.insn.groups[i],
               in_max + insn_needs.input.groups[i]);

      insn_needs.output.groups[i] += out_max;
      insn_needs.other.groups[i]
        += MAX (MAX (insn_needs.input.groups[i],
                     insn_needs.output.groups[i]),
                insn_needs.other_addr.groups[i]);
    }

  /* Record the needs for later.  */
  chain->need = insn_needs.other;
}
/* Find a group of exactly 2 registers.

   First try to fill out the group by spilling a single register which
   would allow completion of the group.

   Then try to create a new group from a pair of registers, neither of
   which are explicitly used.

   Then try to create a group from any pair of registers.

   CHAIN describes the insn being spilled for, CLASS is the register
   class (as an int index) that needs the group, and DUMPFILE receives
   debug output via new_spill_reg.  */

static void
find_tworeg_group (chain, class, dumpfile)
     struct insn_chain *chain;
     int class;
     FILE *dumpfile;
{
  int i;
  /* First, look for a register that will complete a group.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, other;

      j = potential_reload_regs[i];
      /* Try J's lower neighbor, then its upper neighbor, as the already-
         spilled half of the group.  Note the comma expressions below set
         OTHER as a side effect before it is tested.  */
      if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
          && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
               && TEST_HARD_REG_BIT (reg_class_contents[class], j)
               && TEST_HARD_REG_BIT (reg_class_contents[class], other)
               && HARD_REGNO_MODE_OK (other, chain->group_mode[class])
               && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
               /* We don't want one part of another group.
                  We could get "two groups" that overlap!  */
               && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))
              || (j < FIRST_PSEUDO_REGISTER - 1
                  && (other = j + 1, spill_reg_order[other] >= 0)
                  && TEST_HARD_REG_BIT (reg_class_contents[class], j)
                  && TEST_HARD_REG_BIT (reg_class_contents[class], other)
                  && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
                  && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
                  && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))))
        {
          register enum reg_class *p;

          /* We have found one that will complete a group,
             so count off one group as provided.  */
          chain->need.groups[class]--;
          p = reg_class_superclasses[class];
          while (*p != LIM_REG_CLASSES)
            {
              /* A superclass is only satisfied if its group size
                 requirement is no larger than ours.  */
              if (chain->group_size [(int) *p] <= chain->group_size [class])
                chain->need.groups[(int) *p]--;
              p++;
            }

          /* Indicate both these regs are part of a group.  */
          SET_HARD_REG_BIT (chain->counted_for_groups, j);
          SET_HARD_REG_BIT (chain->counted_for_groups, other);
          break;
        }
    }
  /* We can't complete a group, so start one.  */
  if (i == FIRST_PSEUDO_REGISTER)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      {
        int j, k;
        j = potential_reload_regs[i];
        /* Verify that J+1 is a potential reload reg.  */
        for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
          if (potential_reload_regs[k] == j + 1)
            break;
        if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
            && k < FIRST_PSEUDO_REGISTER
            && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
            && TEST_HARD_REG_BIT (reg_class_contents[class], j)
            && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
            && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
            && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1)
            && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
          break;
      }

  /* I should be the index in potential_reload_regs
     of the new reload reg we have found.  */
  /* NOTE(review): if neither loop found a candidate, I equals
     FIRST_PSEUDO_REGISTER here; presumably new_spill_reg reports the
     spill failure in that case -- confirm against its definition.  */

  new_spill_reg (chain, i, class, 0, dumpfile);
}
*/ 1759 SET_HARD_REG_BIT (chain->counted_for_groups, j); 1760 SET_HARD_REG_BIT (chain->counted_for_groups, other); 1761 break; 1762 } 1763 } 1764 /* We can't complete a group, so start one. */ 1765 if (i == FIRST_PSEUDO_REGISTER) 1766 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1767 { 1768 int j, k; 1769 j = potential_reload_regs[i]; 1770 /* Verify that J+1 is a potential reload reg. */ 1771 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++) 1772 if (potential_reload_regs[k] == j + 1) 1773 break; 1774 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER 1775 && k < FIRST_PSEUDO_REGISTER 1776 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0 1777 && TEST_HARD_REG_BIT (reg_class_contents[class], j) 1778 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1) 1779 && HARD_REGNO_MODE_OK (j, chain->group_mode[class]) 1780 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1) 1781 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)) 1782 break; 1783 } 1784 1785 /* I should be the index in potential_reload_regs 1786 of the new reload reg we have found. */ 1787 1788 new_spill_reg (chain, i, class, 0, dumpfile); 1789} 1790 1791/* Find a group of more than 2 registers. 1792 Look for a sufficient sequence of unspilled registers, and spill them all 1793 at once. */ 1794 1795static void 1796find_group (chain, class, dumpfile) 1797 struct insn_chain *chain; 1798 int class; 1799 FILE *dumpfile; 1800{ 1801 int i; 1802 1803 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1804 { 1805 int j = potential_reload_regs[i]; 1806 1807 if (j >= 0 1808 && j + chain->group_size[class] <= FIRST_PSEUDO_REGISTER 1809 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])) 1810 { 1811 int k; 1812 /* Check each reg in the sequence. */ 1813 for (k = 0; k < chain->group_size[class]; k++) 1814 if (! (spill_reg_order[j + k] < 0 1815 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k) 1816 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k))) 1817 break; 1818 /* We got a full sequence, so spill them all. 
*/ 1819 if (k == chain->group_size[class]) 1820 { 1821 register enum reg_class *p; 1822 for (k = 0; k < chain->group_size[class]; k++) 1823 { 1824 int idx; 1825 SET_HARD_REG_BIT (chain->counted_for_groups, j + k); 1826 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++) 1827 if (potential_reload_regs[idx] == j + k) 1828 break; 1829 new_spill_reg (chain, idx, class, 0, dumpfile); 1830 } 1831 1832 /* We have found one that will complete a group, 1833 so count off one group as provided. */ 1834 chain->need.groups[class]--; 1835 p = reg_class_superclasses[class]; 1836 while (*p != LIM_REG_CLASSES) 1837 { 1838 if (chain->group_size [(int) *p] 1839 <= chain->group_size [class]) 1840 chain->need.groups[(int) *p]--; 1841 p++; 1842 } 1843 return; 1844 } 1845 } 1846 } 1847 /* There are no groups left. */ 1848 spill_failure (chain->insn); 1849 failure = 1; 1850} 1851 1852/* If pseudo REG conflicts with one of our reload registers, mark it as 1853 spilled. */ 1854static void 1855maybe_mark_pseudo_spilled (reg) 1856 int reg; 1857{ 1858 int i; 1859 int r = reg_renumber[reg]; 1860 int nregs; 1861 1862 if (r < 0) 1863 abort (); 1864 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg)); 1865 for (i = 0; i < n_spills; i++) 1866 if (r <= spill_regs[i] && r + nregs > spill_regs[i]) 1867 { 1868 SET_REGNO_REG_SET (spilled_pseudos, reg); 1869 return; 1870 } 1871} 1872 1873/* Find more reload regs to satisfy the remaining need of an insn, which 1874 is given by CHAIN. 1875 Do it by ascending class number, since otherwise a reg 1876 might be spilled for a big class and might fail to count 1877 for a smaller class even though it belongs to that class. 1878 1879 Count spilled regs in `spills', and add entries to 1880 `spill_regs' and `spill_reg_order'. 1881 1882 ??? Note there is a problem here. 1883 When there is a need for a group in a high-numbered class, 1884 and also need for non-group regs that come from a lower class, 1885 the non-group regs are chosen first. 
   If there aren't many regs,
   they might leave no room for a group.

   This was happening on the 386.  To fix it, we added the code
   that calls possible_group_p, so that the lower class won't
   break up the last possible group.

   Really fixing the problem would require changes above
   in counting the regs already spilled, and in choose_reload_regs.
   It might be hard to avoid introducing bugs there.  */

static void
find_reload_regs (chain, dumpfile)
     struct insn_chain *chain;
     FILE *dumpfile;
{
  int i, class;
  /* Aliases into CHAIN's need counters, decremented as needs are met.  */
  short *group_needs = chain->need.groups;
  short *simple_needs = chain->need.regs[0];
  short *nongroup_needs = chain->need.regs[1];

  if (dumpfile)
    fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload (chain);

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  CLEAR_HARD_REG_SET (chain->used_spill_regs);
  CLEAR_HARD_REG_SET (chain->counted_for_groups);
  CLEAR_HARD_REG_SET (chain->counted_for_nongroups);

  for (class = 0; class < N_REG_CLASSES; class++)
    {
      /* First get the groups of registers.
	 If we got single registers first, we might fragment
	 possible groups.  */
      while (group_needs[class] > 0)
	{
	  /* If any single spilled regs happen to form groups,
	     count them now.  Maybe we don't really need
	     to spill another group.  */
	  count_possible_groups (chain, class);

	  if (group_needs[class] <= 0)
	    break;

	  /* Groups of size 2, the only groups used on most machines,
	     are treated specially.  */
	  if (chain->group_size[class] == 2)
	    find_tworeg_group (chain, class, dumpfile);
	  else
	    find_group (chain, class, dumpfile);
	  /* FAILURE is a file-scope flag set when no spill reg could be
	     found; give up on this insn entirely.  */
	  if (failure)
	    return;
	}

      /* Now similarly satisfy all need for single registers.  */

      while (simple_needs[class] > 0 || nongroup_needs[class] > 0)
	{
	  /* If we spilled enough regs, but they weren't counted
	     against the non-group need, see if we can count them now.
	     If so, we can avoid some actual spilling.  */
	  if (simple_needs[class] <= 0 && nongroup_needs[class] > 0)
	    for (i = 0; i < n_spills; i++)
	      {
		int regno = spill_regs[i];
		if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
		    && !TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
		    && !TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno)
		    && nongroup_needs[class] > 0)
		  {
		    register enum reg_class *p;

		    SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
		    nongroup_needs[class]--;
		    p = reg_class_superclasses[class];
		    while (*p != LIM_REG_CLASSES)
		      nongroup_needs[(int) *p++]--;
		  }
	      }

	  if (simple_needs[class] <= 0 && nongroup_needs[class] <= 0)
	    break;

	  /* Consider the potential reload regs that aren't
	     yet in use as reload regs, in order of preference.
	     Find the most preferred one that's in this class.  */

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      int regno = potential_reload_regs[i];
	      if (regno >= 0
		  && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
		  /* If this reg will not be available for groups,
		     pick one that does not foreclose possible groups.
		     This is a kludge, and not very general,
		     but it should be sufficient to make the 386 work,
		     and the problem should not occur on machines with
		     more registers.  */
		  && (nongroup_needs[class] == 0
		      || possible_group_p (chain, regno)))
		break;
	    }

	  /* If we couldn't get a register, try to get one even if we
	     might foreclose possible groups.  This may cause problems
	     later, but that's better than aborting now, since it is
	     possible that we will, in fact, be able to form the needed
	     group even with this allocation.  */

	  /* NOTE(review): asm_noperands is given the insn here, but
	     PATTERN (insn) elsewhere in this file (e.g. spill_failure);
	     verify which is intended.  */
	  if (i >= FIRST_PSEUDO_REGISTER
	      && asm_noperands (chain->insn) < 0)
	    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	      if (potential_reload_regs[i] >= 0
		  && TEST_HARD_REG_BIT (reg_class_contents[class],
					potential_reload_regs[i]))
		break;

	  /* I should be the index in potential_reload_regs
	     of the new reload reg we have found.  */

	  new_spill_reg (chain, i, class, 1, dumpfile);
	  if (failure)
	    return;
	}
    }

  /* We know which hard regs to use, now mark the pseudos that live in them
     as needing to be kicked out.  */
  EXECUTE_IF_SET_IN_REG_SET
    (chain->live_before, FIRST_PSEUDO_REGISTER, i,
     {
       maybe_mark_pseudo_spilled (i);
     });
  EXECUTE_IF_SET_IN_REG_SET
    (chain->live_after, FIRST_PSEUDO_REGISTER, i,
     {
       maybe_mark_pseudo_spilled (i);
     });

  IOR_HARD_REG_SET (used_spill_regs, chain->used_spill_regs);
}

/* Print to DUMPFILE the simple, nongroup and group register needs
   recorded in CHAIN, one line per nonzero need per class.  */

void
dump_needs (chain, dumpfile)
     struct insn_chain *chain;
     FILE *dumpfile;
{
  static char *reg_class_names[] = REG_CLASS_NAMES;
  int i;
  struct needs *n = &chain->need;

  for (i = 0; i < N_REG_CLASSES; i++)
    {
      if (n->regs[i][0] > 0)
	fprintf (dumpfile,
		 ";; Need %d reg%s of class %s.\n",
		 n->regs[i][0], n->regs[i][0] == 1 ? "" : "s",
		 reg_class_names[i]);
      if (n->regs[i][1] > 0)
	fprintf (dumpfile,
		 ";; Need %d nongroup reg%s of class %s.\n",
		 n->regs[i][1], n->regs[i][1] == 1 ? "" : "s",
		 reg_class_names[i]);
      if (n->groups[i] > 0)
	fprintf (dumpfile,
		 ";; Need %d group%s (%smode) of class %s.\n",
		 n->groups[i], n->groups[i] == 1 ? "" : "s",
		 mode_name[(int) chain->group_mode[i]],
		 reg_class_names[i]);
    }
}

/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  */
static void
delete_caller_save_insns ()
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx insn = c->insn;

	  /* Keep the basic-block boundaries accurate if the deleted insn
	     was the head or end of its block.  */
	  if (insn == BLOCK_HEAD (c->block))
	    BLOCK_HEAD (c->block) = NEXT_INSN (insn);
	  if (insn == BLOCK_END (c->block))
	    BLOCK_END (c->block) = PREV_INSN (insn);
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;

	  /* Unlink the insn from the doubly-linked insn stream.  */
	  if (NEXT_INSN (insn) != 0)
	    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	  if (PREV_INSN (insn) != 0)
	    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);

	  /* Unlink C from the insn-chain list and put it on the free list
	     for reuse.  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}

/* Nonzero if, after spilling reg REGNO for non-groups,
   it will still be possible to find a group if we still need one.  */

static int
possible_group_p (chain, regno)
     struct insn_chain *chain;
     int regno;
{
  int i;
  int class = (int) NO_REGS;

  /* Find the lowest-numbered class that still needs a group; if none
     does, spilling REGNO cannot hurt.  */
  for (i = 0; i < (int) N_REG_CLASSES; i++)
    if (chain->need.groups[i] > 0)
      {
	class = i;
	break;
      }

  if (class == (int) NO_REGS)
    return 1;

  /* Consider each pair of consecutive registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
    {
      /* Ignore pairs that include reg REGNO.  */
      if (i == regno || i + 1 == regno)
	continue;

      /* Ignore pairs that are outside the class that needs the group.
	 ??? Here we fail to handle the case where two different classes
	 independently need groups.  But this never happens with our
	 current machine descriptions.  */
      if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
	     && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
	continue;

      /* A pair of consecutive regs we can still spill does the trick.  */
      if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
	return 1;

      /* A pair of one already spilled and one we can spill does it
	 provided the one already spilled is not otherwise reserved.  */
      if (spill_reg_order[i] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && spill_reg_order[i + 1] >= 0
	  && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i + 1)
	  && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i + 1))
	return 1;
      if (spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
	  && spill_reg_order[i] >= 0
	  && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i)
	  && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i))
	return 1;
    }

  return 0;
}

/* Count any groups of CLASS that can be formed from the registers recently
   spilled.  */

static void
count_possible_groups (chain, class)
     struct insn_chain *chain;
     int class;
{
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  if (chain->group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I.  */
  for (i = 0; i < n_spills; i++)
    {
      int regno = spill_regs[i];

      if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
	  && ! TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
	  && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno))
	SET_HARD_REG_BIT (new, regno);
    }

  /* Find each consecutive group of them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && chain->need.groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, chain->group_mode[class]))
      {
	for (j = 1; j < chain->group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == chain->group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    chain->need.groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		if (chain->group_size [(int) *p] <= chain->group_size [class])
		  chain->need.groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < chain->group_size[class]; j++)
	      SET_HARD_REG_BIT (chain->counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.  */
	i += j - 1;
      }
}

/* ALLOCATE_MODE is a register mode that needs to be reloaded.  OTHER_MODE is
   another mode that needs to be reloaded for the same register class CLASS.
   If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
   ALLOCATE_MODE will never be smaller than OTHER_MODE.

   This code used to also fail if any reg in CLASS allows OTHER_MODE but not
   ALLOCATE_MODE.
   This test is unnecessary, because we will never try to put
   something of mode ALLOCATE_MODE into an OTHER_MODE register.  Testing this
   causes unnecessary failures on machines requiring alignment of register
   groups when the two modes are different sizes, because the larger mode has
   more strict alignment rules than the smaller mode.  */

static int
modes_equiv_for_class_p (allocate_mode, other_mode, class)
     enum machine_mode allocate_mode, other_mode;
     enum reg_class class;
{
  register int regno;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
	  && HARD_REGNO_MODE_OK (regno, allocate_mode)
	  && ! HARD_REGNO_MODE_OK (regno, other_mode))
	return 0;
    }
  return 1;
}

/* Handle the failure to find a register to spill.
   INSN should be one of the insns which needed this particular spill reg.
   Reports via error_for_asm for asm statements, otherwise aborts the
   compilation with fatal_insn.  */

static void
spill_failure (insn)
     rtx insn;
{
  if (asm_noperands (PATTERN (insn)) >= 0)
    error_for_asm (insn, "`asm' needs too many reloads");
  else
    fatal_insn ("Unable to find a register to spill.", insn);
}

/* Add a new register to the tables of available spill-registers.
   CHAIN is the insn for which the register will be used; we decrease the
   needs of that insn.
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   NONGROUP is 0 if this register is part of a group.
   DUMPFILE is the same as the one that `reload' got.
*/ 2281 2282static void 2283new_spill_reg (chain, i, class, nongroup, dumpfile) 2284 struct insn_chain *chain; 2285 int i; 2286 int class; 2287 int nongroup; 2288 FILE *dumpfile; 2289{ 2290 register enum reg_class *p; 2291 int regno = potential_reload_regs[i]; 2292 2293 if (i >= FIRST_PSEUDO_REGISTER) 2294 { 2295 spill_failure (chain->insn); 2296 failure = 1; 2297 return; 2298 } 2299 2300 if (TEST_HARD_REG_BIT (bad_spill_regs, regno)) 2301 { 2302 static char *reg_class_names[] = REG_CLASS_NAMES; 2303 2304 if (asm_noperands (PATTERN (chain->insn)) < 0) 2305 { 2306 /* The error message is still correct - we know only that it wasn't 2307 an asm statement that caused the problem, but one of the global 2308 registers declared by the users might have screwed us. */ 2309 error ("fixed or forbidden register %d (%s) was spilled for class %s.", 2310 regno, reg_names[regno], reg_class_names[class]); 2311 error ("This may be due to a compiler bug or to impossible asm"); 2312 error ("statements or clauses."); 2313 fatal_insn ("This is the instruction:", chain->insn); 2314 } 2315 error_for_asm (chain->insn, "Invalid `asm' statement:"); 2316 error_for_asm (chain->insn, 2317 "fixed or forbidden register %d (%s) was spilled for class %s.", 2318 regno, reg_names[regno], reg_class_names[class]); 2319 failure = 1; 2320 return; 2321 } 2322 2323 /* Make reg REGNO an additional reload reg. */ 2324 2325 potential_reload_regs[i] = -1; 2326 spill_regs[n_spills] = regno; 2327 spill_reg_order[regno] = n_spills; 2328 if (dumpfile) 2329 fprintf (dumpfile, "Spilling reg %d.\n", regno); 2330 SET_HARD_REG_BIT (chain->used_spill_regs, regno); 2331 2332 /* Clear off the needs we just satisfied. 
*/ 2333 2334 chain->need.regs[0][class]--; 2335 p = reg_class_superclasses[class]; 2336 while (*p != LIM_REG_CLASSES) 2337 chain->need.regs[0][(int) *p++]--; 2338 2339 if (nongroup && chain->need.regs[1][class] > 0) 2340 { 2341 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno); 2342 chain->need.regs[1][class]--; 2343 p = reg_class_superclasses[class]; 2344 while (*p != LIM_REG_CLASSES) 2345 chain->need.regs[1][(int) *p++]--; 2346 } 2347 2348 n_spills++; 2349} 2350 2351/* Delete an unneeded INSN and any previous insns who sole purpose is loading 2352 data that is dead in INSN. */ 2353 2354static void 2355delete_dead_insn (insn) 2356 rtx insn; 2357{ 2358 rtx prev = prev_real_insn (insn); 2359 rtx prev_dest; 2360 2361 /* If the previous insn sets a register that dies in our insn, delete it 2362 too. */ 2363 if (prev && GET_CODE (PATTERN (prev)) == SET 2364 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG) 2365 && reg_mentioned_p (prev_dest, PATTERN (insn)) 2366 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)) 2367 && ! side_effects_p (SET_SRC (PATTERN (prev)))) 2368 delete_dead_insn (prev); 2369 2370 PUT_CODE (insn, NOTE); 2371 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; 2372 NOTE_SOURCE_FILE (insn) = 0; 2373} 2374 2375/* Modify the home of pseudo-reg I. 2376 The new home is present in reg_renumber[I]. 2377 2378 FROM_REG may be the hard reg that the pseudo-reg is being spilled from; 2379 or it may be -1, meaning there is none or it is not relevant. 2380 This is used so that all pseudos spilled from a given hard reg 2381 can share one stack slot. */ 2382 2383static void 2384alter_reg (i, from_reg) 2385 register int i; 2386 int from_reg; 2387{ 2388 /* When outputting an inline function, this can happen 2389 for a reg that isn't actually used. */ 2390 if (regno_reg_rtx[i] == 0) 2391 return; 2392 2393 /* If the reg got changed to a MEM at rtl-generation time, 2394 ignore it. 
*/ 2395 if (GET_CODE (regno_reg_rtx[i]) != REG) 2396 return; 2397 2398 /* Modify the reg-rtx to contain the new hard reg 2399 number or else to contain its pseudo reg number. */ 2400 REGNO (regno_reg_rtx[i]) 2401 = reg_renumber[i] >= 0 ? reg_renumber[i] : i; 2402 2403 /* If we have a pseudo that is needed but has no hard reg or equivalent, 2404 allocate a stack slot for it. */ 2405 2406 if (reg_renumber[i] < 0 2407 && REG_N_REFS (i) > 0 2408 && reg_equiv_constant[i] == 0 2409 && reg_equiv_memory_loc[i] == 0) 2410 { 2411 register rtx x; 2412 int inherent_size = PSEUDO_REGNO_BYTES (i); 2413 int total_size = MAX (inherent_size, reg_max_ref_width[i]); 2414 int adjust = 0; 2415 2416 /* Each pseudo reg has an inherent size which comes from its own mode, 2417 and a total size which provides room for paradoxical subregs 2418 which refer to the pseudo reg in wider modes. 2419 2420 We can use a slot already allocated if it provides both 2421 enough inherent space and enough total space. 2422 Otherwise, we allocate a new slot, making sure that it has no less 2423 inherent space, and no less total space, then the previous slot. */ 2424 if (from_reg == -1) 2425 { 2426 /* No known place to spill from => no slot to reuse. */ 2427 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, 2428 inherent_size == total_size ? 0 : -1); 2429 if (BYTES_BIG_ENDIAN) 2430 /* Cancel the big-endian correction done in assign_stack_local. 2431 Get the address of the beginning of the slot. 2432 This is so we can do a big-endian correction unconditionally 2433 below. */ 2434 adjust = inherent_size - total_size; 2435 2436 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]); 2437 } 2438 /* Reuse a stack slot if possible. */ 2439 else if (spill_stack_slot[from_reg] != 0 2440 && spill_stack_slot_width[from_reg] >= total_size 2441 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg])) 2442 >= inherent_size)) 2443 x = spill_stack_slot[from_reg]; 2444 /* Allocate a bigger slot. 
*/ 2445 else 2446 { 2447 /* Compute maximum size needed, both for inherent size 2448 and for total size. */ 2449 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]); 2450 rtx stack_slot; 2451 if (spill_stack_slot[from_reg]) 2452 { 2453 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg])) 2454 > inherent_size) 2455 mode = GET_MODE (spill_stack_slot[from_reg]); 2456 if (spill_stack_slot_width[from_reg] > total_size) 2457 total_size = spill_stack_slot_width[from_reg]; 2458 } 2459 /* Make a slot with that size. */ 2460 x = assign_stack_local (mode, total_size, 2461 inherent_size == total_size ? 0 : -1); 2462 stack_slot = x; 2463 if (BYTES_BIG_ENDIAN) 2464 { 2465 /* Cancel the big-endian correction done in assign_stack_local. 2466 Get the address of the beginning of the slot. 2467 This is so we can do a big-endian correction unconditionally 2468 below. */ 2469 adjust = GET_MODE_SIZE (mode) - total_size; 2470 if (adjust) 2471 stack_slot = gen_rtx_MEM (mode_for_size (total_size 2472 * BITS_PER_UNIT, 2473 MODE_INT, 1), 2474 plus_constant (XEXP (x, 0), adjust)); 2475 } 2476 spill_stack_slot[from_reg] = stack_slot; 2477 spill_stack_slot_width[from_reg] = total_size; 2478 } 2479 2480 /* On a big endian machine, the "address" of the slot 2481 is the address of the low part that fits its inherent mode. */ 2482 if (BYTES_BIG_ENDIAN && inherent_size < total_size) 2483 adjust += (total_size - inherent_size); 2484 2485 /* If we have any adjustment to make, or if the stack slot is the 2486 wrong mode, make a new stack slot. */ 2487 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i])) 2488 { 2489 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]), 2490 plus_constant (XEXP (x, 0), adjust)); 2491 2492 /* If this was shared among registers, must ensure we never 2493 set it readonly since that can cause scheduling 2494 problems. Note we would only have in this adjustment 2495 case in any event, since the code above doesn't set it. 
*/ 2496 2497 if (from_reg == -1) 2498 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]); 2499 } 2500 2501 /* Save the stack slot for later. */ 2502 reg_equiv_memory_loc[i] = x; 2503 } 2504} 2505 2506/* Mark the slots in regs_ever_live for the hard regs 2507 used by pseudo-reg number REGNO. */ 2508 2509void 2510mark_home_live (regno) 2511 int regno; 2512{ 2513 register int i, lim; 2514 i = reg_renumber[regno]; 2515 if (i < 0) 2516 return; 2517 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno)); 2518 while (i < lim) 2519 regs_ever_live[i++] = 1; 2520} 2521 2522/* This function handles the tracking of elimination offsets around branches. 2523 2524 X is a piece of RTL being scanned. 2525 2526 INSN is the insn that it came from, if any. 2527 2528 INITIAL_P is non-zero if we are to set the offset to be the initial 2529 offset and zero if we are setting the offset of the label to be the 2530 current offset. */ 2531 2532static void 2533set_label_offsets (x, insn, initial_p) 2534 rtx x; 2535 rtx insn; 2536 int initial_p; 2537{ 2538 enum rtx_code code = GET_CODE (x); 2539 rtx tem; 2540 unsigned int i; 2541 struct elim_table *p; 2542 2543 switch (code) 2544 { 2545 case LABEL_REF: 2546 if (LABEL_REF_NONLOCAL_P (x)) 2547 return; 2548 2549 x = XEXP (x, 0); 2550 2551 /* ... fall through ... */ 2552 2553 case CODE_LABEL: 2554 /* If we know nothing about this label, set the desired offsets. Note 2555 that this sets the offset at a label to be the offset before a label 2556 if we don't know anything about the label. This is not correct for 2557 the label after a BARRIER, but is the best guess we can make. If 2558 we guessed wrong, we will suppress an elimination that might have 2559 been possible had we been able to guess correctly. */ 2560 2561 if (! offsets_known_at[CODE_LABEL_NUMBER (x)]) 2562 { 2563 for (i = 0; i < NUM_ELIMINABLE_REGS; i++) 2564 offsets_at[CODE_LABEL_NUMBER (x)][i] 2565 = (initial_p ? 
reg_eliminate[i].initial_offset 2566 : reg_eliminate[i].offset); 2567 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1; 2568 } 2569 2570 /* Otherwise, if this is the definition of a label and it is 2571 preceded by a BARRIER, set our offsets to the known offset of 2572 that label. */ 2573 2574 else if (x == insn 2575 && (tem = prev_nonnote_insn (insn)) != 0 2576 && GET_CODE (tem) == BARRIER) 2577 set_offsets_for_label (insn); 2578 else 2579 /* If neither of the above cases is true, compare each offset 2580 with those previously recorded and suppress any eliminations 2581 where the offsets disagree. */ 2582 2583 for (i = 0; i < NUM_ELIMINABLE_REGS; i++) 2584 if (offsets_at[CODE_LABEL_NUMBER (x)][i] 2585 != (initial_p ? reg_eliminate[i].initial_offset 2586 : reg_eliminate[i].offset)) 2587 reg_eliminate[i].can_eliminate = 0; 2588 2589 return; 2590 2591 case JUMP_INSN: 2592 set_label_offsets (PATTERN (insn), insn, initial_p); 2593 2594 /* ... fall through ... */ 2595 2596 case INSN: 2597 case CALL_INSN: 2598 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly 2599 and hence must have all eliminations at their initial offsets. */ 2600 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1)) 2601 if (REG_NOTE_KIND (tem) == REG_LABEL) 2602 set_label_offsets (XEXP (tem, 0), insn, 1); 2603 return; 2604 2605 case ADDR_VEC: 2606 case ADDR_DIFF_VEC: 2607 /* Each of the labels in the address vector must be at their initial 2608 offsets. We want the first field for ADDR_VEC and the second 2609 field for ADDR_DIFF_VEC. */ 2610 2611 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++) 2612 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i), 2613 insn, initial_p); 2614 return; 2615 2616 case SET: 2617 /* We only care about setting PC. If the source is not RETURN, 2618 IF_THEN_ELSE, or a label, disable any eliminations not at 2619 their initial offsets. Similarly if any arm of the IF_THEN_ELSE 2620 isn't one of those possibilities. 
For branches to a label, 2621 call ourselves recursively. 2622 2623 Note that this can disable elimination unnecessarily when we have 2624 a non-local goto since it will look like a non-constant jump to 2625 someplace in the current function. This isn't a significant 2626 problem since such jumps will normally be when all elimination 2627 pairs are back to their initial offsets. */ 2628 2629 if (SET_DEST (x) != pc_rtx) 2630 return; 2631 2632 switch (GET_CODE (SET_SRC (x))) 2633 { 2634 case PC: 2635 case RETURN: 2636 return; 2637 2638 case LABEL_REF: 2639 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p); 2640 return; 2641 2642 case IF_THEN_ELSE: 2643 tem = XEXP (SET_SRC (x), 1); 2644 if (GET_CODE (tem) == LABEL_REF) 2645 set_label_offsets (XEXP (tem, 0), insn, initial_p); 2646 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) 2647 break; 2648 2649 tem = XEXP (SET_SRC (x), 2); 2650 if (GET_CODE (tem) == LABEL_REF) 2651 set_label_offsets (XEXP (tem, 0), insn, initial_p); 2652 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) 2653 break; 2654 return; 2655 2656 default: 2657 break; 2658 } 2659 2660 /* If we reach here, all eliminations must be at their initial 2661 offset because we are doing a jump to a variable address. */ 2662 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++) 2663 if (p->offset != p->initial_offset) 2664 p->can_eliminate = 0; 2665 break; 2666 2667 default: 2668 break; 2669 } 2670} 2671 2672/* Used for communication between the next two function to properly share 2673 the vector for an ASM_OPERANDS. */ 2674 2675static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec; 2676 2677/* Scan X and replace any eliminable registers (such as fp) with a 2678 replacement (such as sp), plus an offset. 2679 2680 MEM_MODE is the mode of an enclosing MEM. We need this to know how 2681 much to adjust a register for, e.g., PRE_DEC. 
Also, if we are inside a 2682 MEM, we are allowed to replace a sum of a register and the constant zero 2683 with the register, which we cannot do outside a MEM. In addition, we need 2684 to record the fact that a register is referenced outside a MEM. 2685 2686 If INSN is an insn, it is the insn containing X. If we replace a REG 2687 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a 2688 CLOBBER of the pseudo after INSN so find_equiv_regs will know that 2689 the REG is being modified. 2690 2691 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST). 2692 That's used when we eliminate in expressions stored in notes. 2693 This means, do not set ref_outside_mem even if the reference 2694 is outside of MEMs. 2695 2696 If we see a modification to a register we know about, take the 2697 appropriate action (see case SET, below). 2698 2699 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had 2700 replacements done assuming all offsets are at their initial values. If 2701 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we 2702 encounter, return the actual location so that find_reloads will do 2703 the proper thing. */ 2704 2705rtx 2706eliminate_regs (x, mem_mode, insn) 2707 rtx x; 2708 enum machine_mode mem_mode; 2709 rtx insn; 2710{ 2711 enum rtx_code code = GET_CODE (x); 2712 struct elim_table *ep; 2713 int regno; 2714 rtx new; 2715 int i, j; 2716 char *fmt; 2717 int copied = 0; 2718 2719 if (! current_function_decl) 2720 return x; 2721 2722 switch (code) 2723 { 2724 case CONST_INT: 2725 case CONST_DOUBLE: 2726 case CONST: 2727 case SYMBOL_REF: 2728 case CODE_LABEL: 2729 case PC: 2730 case CC0: 2731 case ASM_INPUT: 2732 case ADDR_VEC: 2733 case ADDR_DIFF_VEC: 2734 case RETURN: 2735 return x; 2736 2737 case ADDRESSOF: 2738 /* This is only for the benefit of the debugging backends, which call 2739 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are 2740 removed after CSE. 
*/ 2741 new = eliminate_regs (XEXP (x, 0), 0, insn); 2742 if (GET_CODE (new) == MEM) 2743 return XEXP (new, 0); 2744 return x; 2745 2746 case REG: 2747 regno = REGNO (x); 2748 2749 /* First handle the case where we encounter a bare register that 2750 is eliminable. Replace it with a PLUS. */ 2751 if (regno < FIRST_PSEUDO_REGISTER) 2752 { 2753 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; 2754 ep++) 2755 if (ep->from_rtx == x && ep->can_eliminate) 2756 { 2757 if (! mem_mode 2758 /* Refs inside notes don't count for this purpose. */ 2759 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST 2760 || GET_CODE (insn) == INSN_LIST))) 2761 ep->ref_outside_mem = 1; 2762 return plus_constant (ep->to_rtx, ep->previous_offset); 2763 } 2764 2765 } 2766 else if (reg_renumber[regno] < 0 && reg_equiv_constant 2767 && reg_equiv_constant[regno] 2768 && ! CONSTANT_P (reg_equiv_constant[regno])) 2769 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]), 2770 mem_mode, insn); 2771 return x; 2772 2773 case PLUS: 2774 /* If this is the sum of an eliminable register and a constant, rework 2775 the sum. */ 2776 if (GET_CODE (XEXP (x, 0)) == REG 2777 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER 2778 && CONSTANT_P (XEXP (x, 1))) 2779 { 2780 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; 2781 ep++) 2782 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) 2783 { 2784 if (! mem_mode 2785 /* Refs inside notes don't count for this purpose. */ 2786 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST 2787 || GET_CODE (insn) == INSN_LIST))) 2788 ep->ref_outside_mem = 1; 2789 2790 /* The only time we want to replace a PLUS with a REG (this 2791 occurs when the constant operand of the PLUS is the negative 2792 of the offset) is when we are inside a MEM. We won't want 2793 to do so at other times because that would change the 2794 structure of the insn in a way that reload can't handle. 
2795 We special-case the commonest situation in 2796 eliminate_regs_in_insn, so just replace a PLUS with a 2797 PLUS here, unless inside a MEM. */ 2798 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT 2799 && INTVAL (XEXP (x, 1)) == - ep->previous_offset) 2800 return ep->to_rtx; 2801 else 2802 return gen_rtx_PLUS (Pmode, ep->to_rtx, 2803 plus_constant (XEXP (x, 1), 2804 ep->previous_offset)); 2805 } 2806 2807 /* If the register is not eliminable, we are done since the other 2808 operand is a constant. */ 2809 return x; 2810 } 2811 2812 /* If this is part of an address, we want to bring any constant to the 2813 outermost PLUS. We will do this by doing register replacement in 2814 our operands and seeing if a constant shows up in one of them. 2815 2816 We assume here this is part of an address (or a "load address" insn) 2817 since an eliminable register is not likely to appear in any other 2818 context. 2819 2820 If we have (plus (eliminable) (reg)), we want to produce 2821 (plus (plus (replacement) (reg) (const))). If this was part of a 2822 normal add insn, (plus (replacement) (reg)) will be pushed as a 2823 reload. This is the desired action. */ 2824 2825 { 2826 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn); 2827 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn); 2828 2829 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) 2830 { 2831 /* If one side is a PLUS and the other side is a pseudo that 2832 didn't get a hard register but has a reg_equiv_constant, 2833 we must replace the constant here since it may no longer 2834 be in the position of any operand. 
*/ 2835 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG 2836 && REGNO (new1) >= FIRST_PSEUDO_REGISTER 2837 && reg_renumber[REGNO (new1)] < 0 2838 && reg_equiv_constant != 0 2839 && reg_equiv_constant[REGNO (new1)] != 0) 2840 new1 = reg_equiv_constant[REGNO (new1)]; 2841 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG 2842 && REGNO (new0) >= FIRST_PSEUDO_REGISTER 2843 && reg_renumber[REGNO (new0)] < 0 2844 && reg_equiv_constant[REGNO (new0)] != 0) 2845 new0 = reg_equiv_constant[REGNO (new0)]; 2846 2847 new = form_sum (new0, new1); 2848 2849 /* As above, if we are not inside a MEM we do not want to 2850 turn a PLUS into something else. We might try to do so here 2851 for an addition of 0 if we aren't optimizing. */ 2852 if (! mem_mode && GET_CODE (new) != PLUS) 2853 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx); 2854 else 2855 return new; 2856 } 2857 } 2858 return x; 2859 2860 case MULT: 2861 /* If this is the product of an eliminable register and a 2862 constant, apply the distribute law and move the constant out 2863 so that we have (plus (mult ..) ..). This is needed in order 2864 to keep load-address insns valid. This case is pathological. 2865 We ignore the possibility of overflow here. */ 2866 if (GET_CODE (XEXP (x, 0)) == REG 2867 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER 2868 && GET_CODE (XEXP (x, 1)) == CONST_INT) 2869 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; 2870 ep++) 2871 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) 2872 { 2873 if (! mem_mode 2874 /* Refs inside notes don't count for this purpose. */ 2875 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST 2876 || GET_CODE (insn) == INSN_LIST))) 2877 ep->ref_outside_mem = 1; 2878 2879 return 2880 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)), 2881 ep->previous_offset * INTVAL (XEXP (x, 1))); 2882 } 2883 2884 /* ... fall through ... 
*/ 2885 2886 case CALL: 2887 case COMPARE: 2888 case MINUS: 2889 case DIV: case UDIV: 2890 case MOD: case UMOD: 2891 case AND: case IOR: case XOR: 2892 case ROTATERT: case ROTATE: 2893 case ASHIFTRT: case LSHIFTRT: case ASHIFT: 2894 case NE: case EQ: 2895 case GE: case GT: case GEU: case GTU: 2896 case LE: case LT: case LEU: case LTU: 2897 { 2898 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn); 2899 rtx new1 2900 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0; 2901 2902 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) 2903 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1); 2904 } 2905 return x; 2906 2907 case EXPR_LIST: 2908 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */ 2909 if (XEXP (x, 0)) 2910 { 2911 new = eliminate_regs (XEXP (x, 0), mem_mode, insn); 2912 if (new != XEXP (x, 0)) 2913 { 2914 /* If this is a REG_DEAD note, it is not valid anymore. 2915 Using the eliminated version could result in creating a 2916 REG_DEAD note for the stack or frame pointer. */ 2917 if (GET_MODE (x) == REG_DEAD) 2918 return (XEXP (x, 1) 2919 ? eliminate_regs (XEXP (x, 1), mem_mode, insn) 2920 : NULL_RTX); 2921 2922 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1)); 2923 } 2924 } 2925 2926 /* ... fall through ... */ 2927 2928 case INSN_LIST: 2929 /* Now do eliminations in the rest of the chain. If this was 2930 an EXPR_LIST, this might result in allocating more memory than is 2931 strictly needed, but it simplifies the code. 
*/ 2932 if (XEXP (x, 1)) 2933 { 2934 new = eliminate_regs (XEXP (x, 1), mem_mode, insn); 2935 if (new != XEXP (x, 1)) 2936 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new); 2937 } 2938 return x; 2939 2940 case PRE_INC: 2941 case POST_INC: 2942 case PRE_DEC: 2943 case POST_DEC: 2944 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) 2945 if (ep->to_rtx == XEXP (x, 0)) 2946 { 2947 int size = GET_MODE_SIZE (mem_mode); 2948 2949 /* If more bytes than MEM_MODE are pushed, account for them. */ 2950#ifdef PUSH_ROUNDING 2951 if (ep->to_rtx == stack_pointer_rtx) 2952 size = PUSH_ROUNDING (size); 2953#endif 2954 if (code == PRE_DEC || code == POST_DEC) 2955 ep->offset += size; 2956 else 2957 ep->offset -= size; 2958 } 2959 2960 /* Fall through to generic unary operation case. */ 2961 case STRICT_LOW_PART: 2962 case NEG: case NOT: 2963 case SIGN_EXTEND: case ZERO_EXTEND: 2964 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: 2965 case FLOAT: case FIX: 2966 case UNSIGNED_FIX: case UNSIGNED_FLOAT: 2967 case ABS: 2968 case SQRT: 2969 case FFS: 2970 new = eliminate_regs (XEXP (x, 0), mem_mode, insn); 2971 if (new != XEXP (x, 0)) 2972 return gen_rtx_fmt_e (code, GET_MODE (x), new); 2973 return x; 2974 2975 case SUBREG: 2976 /* Similar to above processing, but preserve SUBREG_WORD. 2977 Convert (subreg (mem)) to (mem) if not paradoxical. 2978 Also, if we have a non-paradoxical (subreg (pseudo)) and the 2979 pseudo didn't get a hard reg, we must replace this with the 2980 eliminated version of the memory location because push_reloads 2981 may do the replacement in certain circumstances. 
*/ 2982 if (GET_CODE (SUBREG_REG (x)) == REG 2983 && (GET_MODE_SIZE (GET_MODE (x)) 2984 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) 2985 && reg_equiv_memory_loc != 0 2986 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0) 2987 { 2988#if 0 2989 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))], 2990 mem_mode, insn); 2991 2992 /* If we didn't change anything, we must retain the pseudo. */ 2993 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))]) 2994 new = SUBREG_REG (x); 2995 else 2996 { 2997 /* In this case, we must show that the pseudo is used in this 2998 insn so that delete_output_reload will do the right thing. */ 2999 if (insn != 0 && GET_CODE (insn) != EXPR_LIST 3000 && GET_CODE (insn) != INSN_LIST) 3001 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode, 3002 SUBREG_REG (x)), 3003 insn)) 3004 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX); 3005 3006 /* Ensure NEW isn't shared in case we have to reload it. */ 3007 new = copy_rtx (new); 3008 } 3009#else 3010 new = SUBREG_REG (x); 3011#endif 3012 } 3013 else 3014 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn); 3015 3016 if (new != XEXP (x, 0)) 3017 { 3018 int x_size = GET_MODE_SIZE (GET_MODE (x)); 3019 int new_size = GET_MODE_SIZE (GET_MODE (new)); 3020 3021 if (GET_CODE (new) == MEM 3022 && ((x_size < new_size 3023#ifdef WORD_REGISTER_OPERATIONS 3024 /* On these machines, combine can create rtl of the form 3025 (set (subreg:m1 (reg:m2 R) 0) ...) 3026 where m1 < m2, and expects something interesting to 3027 happen to the entire word. Moreover, it will use the 3028 (reg:m2 R) later, expecting all bits to be preserved. 3029 So if the number of words is the same, preserve the 3030 subreg so that push_reloads can see it. */ 3031 && ! 
((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD) 3032#endif 3033 ) 3034 || (x_size == new_size)) 3035 ) 3036 { 3037 int offset = SUBREG_WORD (x) * UNITS_PER_WORD; 3038 enum machine_mode mode = GET_MODE (x); 3039 3040 if (BYTES_BIG_ENDIAN) 3041 offset += (MIN (UNITS_PER_WORD, 3042 GET_MODE_SIZE (GET_MODE (new))) 3043 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))); 3044 3045 PUT_MODE (new, mode); 3046 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset); 3047 return new; 3048 } 3049 else 3050 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x)); 3051 } 3052 3053 return x; 3054 3055 case USE: 3056 /* If using a register that is the source of an eliminate we still 3057 think can be performed, note it cannot be performed since we don't 3058 know how this register is used. */ 3059 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) 3060 if (ep->from_rtx == XEXP (x, 0)) 3061 ep->can_eliminate = 0; 3062 3063 new = eliminate_regs (XEXP (x, 0), mem_mode, insn); 3064 if (new != XEXP (x, 0)) 3065 return gen_rtx_fmt_e (code, GET_MODE (x), new); 3066 return x; 3067 3068 case CLOBBER: 3069 /* If clobbering a register that is the replacement register for an 3070 elimination we still think can be performed, note that it cannot 3071 be performed. Otherwise, we need not be concerned about it. */ 3072 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) 3073 if (ep->to_rtx == XEXP (x, 0)) 3074 ep->can_eliminate = 0; 3075 3076 new = eliminate_regs (XEXP (x, 0), mem_mode, insn); 3077 if (new != XEXP (x, 0)) 3078 return gen_rtx_fmt_e (code, GET_MODE (x), new); 3079 return x; 3080 3081 case ASM_OPERANDS: 3082 { 3083 rtx *temp_vec; 3084 /* Properly handle sharing input and constraint vectors. */ 3085 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec) 3086 { 3087 /* When we come to a new vector not seen before, 3088 scan all its elements; keep the old vector if none 3089 of them changes; otherwise, make a copy. 
*/ 3090 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x); 3091 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx)); 3092 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) 3093 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i), 3094 mem_mode, insn); 3095 3096 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) 3097 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i)) 3098 break; 3099 3100 if (i == ASM_OPERANDS_INPUT_LENGTH (x)) 3101 new_asm_operands_vec = old_asm_operands_vec; 3102 else 3103 new_asm_operands_vec 3104 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec); 3105 } 3106 3107 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */ 3108 if (new_asm_operands_vec == old_asm_operands_vec) 3109 return x; 3110 3111 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x), 3112 ASM_OPERANDS_OUTPUT_CONSTRAINT (x), 3113 ASM_OPERANDS_OUTPUT_IDX (x), 3114 new_asm_operands_vec, 3115 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x), 3116 ASM_OPERANDS_SOURCE_FILE (x), 3117 ASM_OPERANDS_SOURCE_LINE (x)); 3118 new->volatil = x->volatil; 3119 return new; 3120 } 3121 3122 case SET: 3123 /* Check for setting a register that we know about. */ 3124 if (GET_CODE (SET_DEST (x)) == REG) 3125 { 3126 /* See if this is setting the replacement register for an 3127 elimination. 3128 3129 If DEST is the hard frame pointer, we do nothing because we 3130 assume that all assignments to the frame pointer are for 3131 non-local gotos and are being done at a time when they are valid 3132 and do not disturb anything else. Some machines want to 3133 eliminate a fake argument pointer (or even a fake frame pointer) 3134 with either the real frame or the stack pointer. Assignments to 3135 the hard frame pointer must not prevent this elimination. */ 3136 3137 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; 3138 ep++) 3139 if (ep->to_rtx == SET_DEST (x) 3140 && SET_DEST (x) != hard_frame_pointer_rtx) 3141 { 3142 /* If it is being incremented, adjust the offset. 
Otherwise, 3143 this elimination can't be done. */ 3144 rtx src = SET_SRC (x); 3145 3146 if (GET_CODE (src) == PLUS 3147 && XEXP (src, 0) == SET_DEST (x) 3148 && GET_CODE (XEXP (src, 1)) == CONST_INT) 3149 ep->offset -= INTVAL (XEXP (src, 1)); 3150 else 3151 ep->can_eliminate = 0; 3152 } 3153 3154 /* Now check to see we are assigning to a register that can be 3155 eliminated. If so, it must be as part of a PARALLEL, since we 3156 will not have been called if this is a single SET. So indicate 3157 that we can no longer eliminate this reg. */ 3158 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; 3159 ep++) 3160 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate) 3161 ep->can_eliminate = 0; 3162 } 3163 3164 /* Now avoid the loop below in this common case. */ 3165 { 3166 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn); 3167 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn); 3168 3169 /* If SET_DEST changed from a REG to a MEM and INSN is an insn, 3170 write a CLOBBER insn. */ 3171 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM 3172 && insn != 0 && GET_CODE (insn) != EXPR_LIST 3173 && GET_CODE (insn) != INSN_LIST) 3174 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn); 3175 3176 if (new0 != SET_DEST (x) || new1 != SET_SRC (x)) 3177 return gen_rtx_SET (VOIDmode, new0, new1); 3178 } 3179 3180 return x; 3181 3182 case MEM: 3183 /* This is only for the benefit of the debugging backends, which call 3184 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are 3185 removed after CSE. */ 3186 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF) 3187 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn); 3188 3189 /* Our only special processing is to pass the mode of the MEM to our 3190 recursive call and copy the flags. While we are here, handle this 3191 case more efficiently. 
*/ 3192 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn); 3193 if (new != XEXP (x, 0)) 3194 { 3195 new = gen_rtx_MEM (GET_MODE (x), new); 3196 new->volatil = x->volatil; 3197 new->unchanging = x->unchanging; 3198 new->in_struct = x->in_struct; 3199 return new; 3200 } 3201 else 3202 return x; 3203 3204 default: 3205 break; 3206 } 3207 3208 /* Process each of our operands recursively. If any have changed, make a 3209 copy of the rtx. */ 3210 fmt = GET_RTX_FORMAT (code); 3211 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) 3212 { 3213 if (*fmt == 'e') 3214 { 3215 new = eliminate_regs (XEXP (x, i), mem_mode, insn); 3216 if (new != XEXP (x, i) && ! copied) 3217 { 3218 rtx new_x = rtx_alloc (code); 3219 bcopy ((char *) x, (char *) new_x, 3220 (sizeof (*new_x) - sizeof (new_x->fld) 3221 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code))); 3222 x = new_x; 3223 copied = 1; 3224 } 3225 XEXP (x, i) = new; 3226 } 3227 else if (*fmt == 'E') 3228 { 3229 int copied_vec = 0; 3230 for (j = 0; j < XVECLEN (x, i); j++) 3231 { 3232 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn); 3233 if (new != XVECEXP (x, i, j) && ! copied_vec) 3234 { 3235 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i), 3236 XVEC (x, i)->elem); 3237 if (! copied) 3238 { 3239 rtx new_x = rtx_alloc (code); 3240 bcopy ((char *) x, (char *) new_x, 3241 (sizeof (*new_x) - sizeof (new_x->fld) 3242 + (sizeof (new_x->fld[0]) 3243 * GET_RTX_LENGTH (code)))); 3244 x = new_x; 3245 copied = 1; 3246 } 3247 XVEC (x, i) = new_v; 3248 copied_vec = 1; 3249 } 3250 XVECEXP (x, i, j) = new; 3251 } 3252 } 3253 } 3254 3255 return x; 3256} 3257 3258/* Scan INSN and eliminate all eliminable registers in it. 3259 3260 If REPLACE is nonzero, do the replacement destructively. Also 3261 delete the insn as dead it if it is setting an eliminable register. 3262 3263 If REPLACE is zero, do all our allocations in reload_obstack. 
   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Return value: 1 if anything changed.  */
  struct elim_table *ep;

  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		/* OFFSET is the constant adjustment extracted from SRC;
		   OK is set once a recognized source form is found.  */
		int offset = 0, ok = 0;
		rtx prev_insn, prev_set;

		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT
			 && XEXP (src, 1) == ep->to_rtx)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 1)) == CONST_INT
			 && XEXP (src, 0) == ep->to_rtx)
		  offset = INTVAL (XEXP (src, 1)), ok = 1;
		/* Also accept the same forms one insn earlier, when the
		   previous insn computed SRC.  */
		else if ((prev_insn = prev_nonnote_insn (insn)) != 0
			 && (prev_set = single_set (prev_insn)) != 0
			 && rtx_equal_p (SET_DEST (prev_set), src))
		  {
		    src = SET_SRC (prev_set);
		    if (src == ep->to_rtx)
		      offset = 0, ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 0)) == CONST_INT
			     && XEXP (src, 1) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 0)), ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 1)) == CONST_INT
			     && XEXP (src, 0) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 1)), ok = 1;
		  }

		if (ok)
		  {
		    if (replace)
		      {
			rtx src
			  = plus_constant (ep->to_rtx, offset - ep->offset);

			/* First see if this insn remains valid when we
			   make the change.  If not, keep the INSN_CODE
			   the same and let reload fix it up.  */
			validate_change (insn, &SET_SRC (old_set), src, 1);
			validate_change (insn, &SET_DEST (old_set),
					 ep->to_rtx, 1);
			if (! apply_change_group ())
			  {
			    SET_SRC (old_set) = src;
			    SET_DEST (old_set) = ep->to_rtx;
			  }
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we can
	 change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx_SET (VOIDmode,
						SET_DEST (old_set),
						ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}

/* Loop through all elimination pairs.
   Recalculate the number not at initial offset.

   Compute the maximum offset (minimum offset if the stack does not
   grow downward) for each elimination pair.
 */

static void
update_eliminable_offsets ()
{
  struct elim_table *ep;

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Remember the offset we just used, and count the pairs whose
	 current offset differs from the initial one.  */
      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;
    }
}

/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
   replacement we currently believe is valid, mark it as not eliminable if X
   modifies DEST in any way other than by adding a constant integer to it.

   If DEST is the frame pointer, we do nothing because we assume that
   all assignments to the hard frame pointer are nonlocal gotos and are being
   done at a time when they are valid and do not disturb anything else.
   Some machines want to eliminate a fake argument pointer with either the
   frame or stack pointer.  Assignments to the hard frame pointer must not
   prevent this elimination.

   Called via note_stores from reload before starting its passes to scan
   the insns of the function.  */

static void
mark_not_eliminable (dest, x)
     rtx dest;
     rtx x;
{
  register unsigned int i;

  /* A SUBREG of a hard register here is just changing its mode.  We should
     not see a SUBREG of an eliminable hard register, but check just in
     case.  */
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (dest == hard_frame_pointer_rtx)
    return;

  /* Disable any elimination whose replacement register is stored by X,
     unless the store is of the allowed (set DEST (plus DEST const)) form.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
	&& (GET_CODE (x) != SET
	    || GET_CODE (SET_SRC (x)) != PLUS
	    || XEXP (SET_SRC (x), 0) != dest
	    || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
      {
	reg_eliminate[i].can_eliminate_previous
	  = reg_eliminate[i].can_eliminate = 0;
	num_eliminable--;
      }
}

/* Verify that the initial elimination offsets did not change since the
   last call to set_initial_elim_offsets.  This is used to catch cases
   where something illegal happened during reload_as_needed that could
   cause incorrect code to be generated if we did not check for it.  */
static void
verify_initial_elim_offsets ()
{
  int t;

#ifdef ELIMINABLE_REGS
  struct elim_table *ep;

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
      if (t != ep->initial_offset)
	abort ();
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (t);
  if (t != reg_eliminate[0].initial_offset)
    abort ();
#endif
}

/* Reset all offsets on eliminable registers to their initial values.  */
static void
set_initial_elim_offsets ()
{
  struct elim_table *ep = reg_eliminate;

#ifdef ELIMINABLE_REGS
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
      ep->previous_offset = ep->offset = ep->initial_offset;
    }
#else
  /* Without ELIMINABLE_REGS there is exactly one elimination pair.  */
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
  ep->previous_offset = ep->offset = ep->initial_offset;
#endif

  num_not_at_initial_offset = 0;
}

/* Initialize the known label offsets.
   Set a known offset for each forced label to be at the initial offset
   of each elimination.  We do this because we assume that all
   computed jumps occur from a location where each elimination is
   at its initial offset.
   For all other labels, show that we don't know the offsets.  */

static void
set_initial_label_offsets ()
{
  rtx x;
  bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);

  for (x = forced_labels; x; x = XEXP (x, 1))
    if (XEXP (x, 0))
      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
}

/* Set all elimination offsets to the known values for the code label given
   by INSN.  */
static void
set_offsets_for_label (insn)
     rtx insn;
{
  unsigned int i;
  int label_nr = CODE_LABEL_NUMBER (insn);
  struct elim_table *ep;

  num_not_at_initial_offset = 0;
  for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
    {
      /* Restore the offsets recorded for this label and recount the
	 pairs not at their initial offset.  */
      ep->offset = ep->previous_offset = offsets_at[label_nr][i];
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;
    }
}

/* See if anything that happened changes which eliminations are valid.
   For example, on the Sparc, whether or not the frame pointer can
   be eliminated can depend on what registers have been used.  We need
   not check some conditions again (such as flag_omit_frame_pointer)
   since they can't have changed.  */

static void
update_eliminables (pset)
     HARD_REG_SET *pset;
{
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  int previous_frame_pointer_needed = frame_pointer_needed;
#endif
  struct elim_table *ep;

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
#ifdef ELIMINABLE_REGS
	|| ! CAN_ELIMINATE (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      register int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM)
	frame_pointer_needed = 0;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
#endif
}

/* Initialize the table of registers to eliminate.  */
static void
init_elim_table ()
{
  struct elim_table *ep;
#ifdef ELIMINABLE_REGS
  struct elim_table_1 *ep1;
#endif

  /* Allocate and zero the elimination table on first use.  */
  if (!reg_eliminate)
    {
      reg_eliminate = (struct elim_table *)
	xmalloc(sizeof(struct elim_table) * NUM_ELIMINABLE_REGS);
      bzero ((PTR) reg_eliminate,
	     sizeof(struct elim_table) * NUM_ELIMINABLE_REGS);
    }

  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
			     and restore sp for alloca.  So we can't eliminate
			     the frame pointer in that case.  At some point,
			     we should improve this by emitting the
			     sp-adjusting insns for this case.  */
			  || (current_function_calls_alloca
			      && EXIT_IGNORE_STACK)
#endif
			  || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      ep->can_eliminate = ep->can_eliminate_previous
	= (CAN_ELIMINATE (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
    }
#else
  reg_eliminate[0].from = reg_eliminate_1[0].from;
  reg_eliminate[0].to = reg_eliminate_1[0].to;
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}

/* Kick all pseudos out of hard register REGNO.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In the case, no pseudos
   are allowed to be in the register, even if they are only in a block that
   doesn't require spill registers, unlike the case when we are spilling this
   hard reg to produce another spill register.

   The pseudos that are kicked out are recorded in the SPILLED_PSEUDOS
   register set; nothing is returned.  */

static void
spill_hard_reg (regno, dumpfile, cant_eliminate)
     register int regno;
     FILE *dumpfile;
     int cant_eliminate;
{
  register int i;

  if (cant_eliminate)
    {
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
      regs_ever_live[regno] = 1;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.
*/ 3817 3818 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 3819 if (reg_renumber[i] >= 0 3820 && reg_renumber[i] <= regno 3821 && (reg_renumber[i] 3822 + HARD_REGNO_NREGS (reg_renumber[i], 3823 PSEUDO_REGNO_MODE (i)) 3824 > regno)) 3825 SET_REGNO_REG_SET (spilled_pseudos, i); 3826} 3827 3828/* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET 3829 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */ 3830static void 3831ior_hard_reg_set (set1, set2) 3832 HARD_REG_SET *set1, *set2; 3833{ 3834 IOR_HARD_REG_SET (*set1, *set2); 3835} 3836 3837/* After find_reload_regs has been run for all insn that need reloads, 3838 and/or spill_hard_regs was called, this function is used to actually 3839 spill pseudo registers and try to reallocate them. It also sets up the 3840 spill_regs array for use by choose_reload_regs. */ 3841 3842static int 3843finish_spills (global, dumpfile) 3844 int global; 3845 FILE *dumpfile; 3846{ 3847 struct insn_chain *chain; 3848 int something_changed = 0; 3849 int i; 3850 3851 /* Build the spill_regs array for the function. */ 3852 /* If there are some registers still to eliminate and one of the spill regs 3853 wasn't ever used before, additional stack space may have to be 3854 allocated to store this register. Thus, we may have changed the offset 3855 between the stack and frame pointers, so mark that something has changed. 3856 3857 One might think that we need only set VAL to 1 if this is a call-used 3858 register. However, the set of registers that must be saved by the 3859 prologue is not identical to the call-used set. For example, the 3860 register used by the call insn for the return PC is a call-used register, 3861 but must be saved by the prologue. */ 3862 3863 n_spills = 0; 3864 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 3865 if (TEST_HARD_REG_BIT (used_spill_regs, i)) 3866 { 3867 spill_reg_order[i] = n_spills; 3868 spill_regs[n_spills++] = i; 3869 if (num_eliminable && ! 
regs_ever_live[i]) 3870 something_changed = 1; 3871 regs_ever_live[i] = 1; 3872 } 3873 else 3874 spill_reg_order[i] = -1; 3875 3876 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 3877 if (REGNO_REG_SET_P (spilled_pseudos, i)) 3878 { 3879 /* Record the current hard register the pseudo is allocated to in 3880 pseudo_previous_regs so we avoid reallocating it to the same 3881 hard reg in a later pass. */ 3882 if (reg_renumber[i] < 0) 3883 abort (); 3884 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]); 3885 /* Mark it as no longer having a hard register home. */ 3886 reg_renumber[i] = -1; 3887 /* We will need to scan everything again. */ 3888 something_changed = 1; 3889 } 3890 3891 /* Retry global register allocation if possible. */ 3892 if (global) 3893 { 3894 bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET)); 3895 /* For every insn that needs reloads, set the registers used as spill 3896 regs in pseudo_forbidden_regs for every pseudo live across the 3897 insn. */ 3898 for (chain = insns_need_reload; chain; chain = chain->next_need_reload) 3899 { 3900 EXECUTE_IF_SET_IN_REG_SET 3901 (chain->live_before, FIRST_PSEUDO_REGISTER, i, 3902 { 3903 ior_hard_reg_set (pseudo_forbidden_regs + i, 3904 &chain->used_spill_regs); 3905 }); 3906 EXECUTE_IF_SET_IN_REG_SET 3907 (chain->live_after, FIRST_PSEUDO_REGISTER, i, 3908 { 3909 ior_hard_reg_set (pseudo_forbidden_regs + i, 3910 &chain->used_spill_regs); 3911 }); 3912 } 3913 3914 /* Retry allocating the spilled pseudos. For each reg, merge the 3915 various reg sets that indicate which hard regs can't be used, 3916 and call retry_global_alloc. 3917 We change spill_pseudos here to only contain pseudos that did not 3918 get a new hard register. 
*/ 3919 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 3920 if (reg_old_renumber[i] != reg_renumber[i]) 3921 { 3922 HARD_REG_SET forbidden; 3923 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global); 3924 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]); 3925 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]); 3926 retry_global_alloc (i, forbidden); 3927 if (reg_renumber[i] >= 0) 3928 CLEAR_REGNO_REG_SET (spilled_pseudos, i); 3929 } 3930 } 3931 3932 /* Fix up the register information in the insn chain. 3933 This involves deleting those of the spilled pseudos which did not get 3934 a new hard register home from the live_{before,after} sets. */ 3935 for (chain = reload_insn_chain; chain; chain = chain->next) 3936 { 3937 HARD_REG_SET used_by_pseudos; 3938 HARD_REG_SET used_by_pseudos2; 3939 3940 AND_COMPL_REG_SET (chain->live_before, spilled_pseudos); 3941 AND_COMPL_REG_SET (chain->live_after, spilled_pseudos); 3942 3943 /* Mark any unallocated hard regs as available for spills. That 3944 makes inheritance work somewhat better. */ 3945 if (chain->need_reload) 3946 { 3947 REG_SET_TO_HARD_REG_SET (used_by_pseudos, chain->live_before); 3948 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, chain->live_after); 3949 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2); 3950 3951 /* Save the old value for the sanity test below. */ 3952 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs); 3953 3954 compute_use_by_pseudos (&used_by_pseudos, chain->live_before); 3955 compute_use_by_pseudos (&used_by_pseudos, chain->live_after); 3956 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos); 3957 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs); 3958 3959 /* Make sure we only enlarge the set. */ 3960 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok); 3961 abort (); 3962 ok:; 3963 } 3964 } 3965 3966 /* Let alter_reg modify the reg rtx's for the modified pseudos. 
*/ 3967 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) 3968 { 3969 int regno = reg_renumber[i]; 3970 if (reg_old_renumber[i] == regno) 3971 continue; 3972 3973 alter_reg (i, reg_old_renumber[i]); 3974 reg_old_renumber[i] = regno; 3975 if (dumpfile) 3976 { 3977 if (regno == -1) 3978 fprintf (dumpfile, " Register %d now on stack.\n\n", i); 3979 else 3980 fprintf (dumpfile, " Register %d now in %d.\n\n", 3981 i, reg_renumber[i]); 3982 } 3983 } 3984 3985 return something_changed; 3986} 3987 3988/* Find all paradoxical subregs within X and update reg_max_ref_width. 3989 Also mark any hard registers used to store user variables as 3990 forbidden from being used for spill registers. */ 3991 3992static void 3993scan_paradoxical_subregs (x) 3994 register rtx x; 3995{ 3996 register int i; 3997 register char *fmt; 3998 register enum rtx_code code = GET_CODE (x); 3999 4000 switch (code) 4001 { 4002 case REG: 4003#if 0 4004 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER 4005 && REG_USERVAR_P (x)) 4006 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x)); 4007#endif 4008 return; 4009 4010 case CONST_INT: 4011 case CONST: 4012 case SYMBOL_REF: 4013 case LABEL_REF: 4014 case CONST_DOUBLE: 4015 case CC0: 4016 case PC: 4017 case USE: 4018 case CLOBBER: 4019 return; 4020 4021 case SUBREG: 4022 if (GET_CODE (SUBREG_REG (x)) == REG 4023 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) 4024 reg_max_ref_width[REGNO (SUBREG_REG (x))] 4025 = GET_MODE_SIZE (GET_MODE (x)); 4026 return; 4027 4028 default: 4029 break; 4030 } 4031 4032 fmt = GET_RTX_FORMAT (code); 4033 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 4034 { 4035 if (fmt[i] == 'e') 4036 scan_paradoxical_subregs (XEXP (x, i)); 4037 else if (fmt[i] == 'E') 4038 { 4039 register int j; 4040 for (j = XVECLEN (x, i) - 1; j >=0; j--) 4041 scan_paradoxical_subregs (XVECEXP (x, i, j)); 4042 } 4043 } 4044} 4045 4046static int 4047hard_reg_use_compare (p1p, p2p) 4048 const GENERIC_PTR p1p; 
4049 const GENERIC_PTR p2p; 4050{ 4051 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p; 4052 struct hard_reg_n_uses *p2 = (struct hard_reg_n_uses *)p2p; 4053 int bad1 = TEST_HARD_REG_BIT (bad_spill_regs, p1->regno); 4054 int bad2 = TEST_HARD_REG_BIT (bad_spill_regs, p2->regno); 4055 if (bad1 && bad2) 4056 return p1->regno - p2->regno; 4057 if (bad1) 4058 return 1; 4059 if (bad2) 4060 return -1; 4061 if (p1->uses > p2->uses) 4062 return 1; 4063 if (p1->uses < p2->uses) 4064 return -1; 4065 /* If regs are equally good, sort by regno, 4066 so that the results of qsort leave nothing to chance. */ 4067 return p1->regno - p2->regno; 4068} 4069 4070/* Used for communication between order_regs_for_reload and count_pseudo. 4071 Used to avoid counting one pseudo twice. */ 4072static regset pseudos_counted; 4073 4074/* Update the costs in N_USES, considering that pseudo REG is live. */ 4075static void 4076count_pseudo (n_uses, reg) 4077 struct hard_reg_n_uses *n_uses; 4078 int reg; 4079{ 4080 int r = reg_renumber[reg]; 4081 int nregs; 4082 4083 if (REGNO_REG_SET_P (pseudos_counted, reg)) 4084 return; 4085 SET_REGNO_REG_SET (pseudos_counted, reg); 4086 4087 if (r < 0) 4088 abort (); 4089 4090 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg)); 4091 while (nregs-- > 0) 4092 n_uses[r++].uses += REG_N_REFS (reg); 4093} 4094/* Choose the order to consider regs for use as reload registers 4095 based on how much trouble would be caused by spilling one. 4096 Store them in order of decreasing preference in potential_reload_regs. */ 4097 4098static void 4099order_regs_for_reload (chain) 4100 struct insn_chain *chain; 4101{ 4102 register int i; 4103 register int o = 0; 4104 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER]; 4105 4106 pseudos_counted = ALLOCA_REG_SET (); 4107 4108 COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global); 4109 4110 /* Count number of uses of each hard reg by pseudo regs allocated to it 4111 and then order them by decreasing use. 
*/ 4112 4113 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 4114 { 4115 int j; 4116 4117 hard_reg_n_uses[i].regno = i; 4118 hard_reg_n_uses[i].uses = 0; 4119 4120 /* Test the various reasons why we can't use a register for 4121 spilling in this insn. */ 4122 if (fixed_regs[i] 4123 || REGNO_REG_SET_P (chain->live_before, i) 4124 || REGNO_REG_SET_P (chain->live_after, i)) 4125 { 4126 SET_HARD_REG_BIT (bad_spill_regs, i); 4127 continue; 4128 } 4129 4130 /* Now find out which pseudos are allocated to it, and update 4131 hard_reg_n_uses. */ 4132 CLEAR_REG_SET (pseudos_counted); 4133 4134 EXECUTE_IF_SET_IN_REG_SET 4135 (chain->live_before, FIRST_PSEUDO_REGISTER, j, 4136 { 4137 count_pseudo (hard_reg_n_uses, j); 4138 }); 4139 EXECUTE_IF_SET_IN_REG_SET 4140 (chain->live_after, FIRST_PSEUDO_REGISTER, j, 4141 { 4142 count_pseudo (hard_reg_n_uses, j); 4143 }); 4144 } 4145 4146 FREE_REG_SET (pseudos_counted); 4147 4148 /* Prefer registers not so far used, for use in temporary loading. 4149 Among them, if REG_ALLOC_ORDER is defined, use that order. 4150 Otherwise, prefer registers not preserved by calls. */ 4151 4152#ifdef REG_ALLOC_ORDER 4153 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 4154 { 4155 int regno = reg_alloc_order[i]; 4156 4157 if (hard_reg_n_uses[regno].uses == 0 4158 && !fixed_regs[regno] 4159 && ! TEST_HARD_REG_BIT (bad_spill_regs, regno)) 4160 potential_reload_regs[o++] = regno; 4161 } 4162#else 4163 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 4164 { 4165 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i] 4166 && !fixed_regs[i] 4167 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)) 4168 potential_reload_regs[o++] = i; 4169 } 4170 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 4171 { 4172 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i] 4173 && !fixed_regs[i] 4174 && ! 
TEST_HARD_REG_BIT (bad_spill_regs, i)) 4175 potential_reload_regs[o++] = i; 4176 } 4177#endif 4178 4179 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER, 4180 sizeof hard_reg_n_uses[0], hard_reg_use_compare); 4181 4182 /* Now add the regs that are already used, 4183 preferring those used less often. The fixed and otherwise forbidden 4184 registers will be at the end of this list. */ 4185 4186 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 4187 if (hard_reg_n_uses[i].uses != 0 4188 && !fixed_regs[hard_reg_n_uses[i].regno] 4189 && ! TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno)) 4190 potential_reload_regs[o++] = hard_reg_n_uses[i].regno; 4191 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 4192 if (TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno) 4193 && !fixed_regs[hard_reg_n_uses[i].regno] 4194) 4195 potential_reload_regs[o++] = hard_reg_n_uses[i].regno; 4196} 4197 4198/* Reload pseudo-registers into hard regs around each insn as needed. 4199 Additional register load insns are output before the insn that needs it 4200 and perhaps store insns after insns that modify the reloaded pseudo reg. 4201 4202 reg_last_reload_reg and reg_reloaded_contents keep track of 4203 which registers are already available in reload registers. 4204 We update these for the reloads that we perform, 4205 as the insns are scanned. 
 */

static void
reload_as_needed (live_known)
     int live_known;
{
  struct insn_chain *chain;
#if defined (AUTO_INC_DEC) || defined (INSN_CLOBBERS_REGNO_P)
  register int i;
#endif
  rtx x;

  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);

  set_initial_elim_offsets ();

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx prev;
      rtx insn = chain->insn;
      rtx old_next = NEXT_INSN (insn);

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	set_offsets_for_label (insn);

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx oldpat = PATTERN (insn);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      /* An insn turned into a NOTE was deleted by elimination.  */
	      if (GET_CODE (insn) == NOTE)
		{
		  update_eliminable_offsets ();
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx next = NEXT_INSN (insn);
	      rtx p;

	      prev = PREV_INSN (insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      if (SMALL_REGISTER_CLASSES)
		merge_assigned_reloads (insn);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (extract_insn (p), ! constrain_operands (1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Delete the offending reload insn by turning it
			 into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = reload_in_reg[i];
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (reload_reg_rtx[i]))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && (reg_reloaded_contents[REGNO (reload_reg_rtx[i])]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = reload_reg_rtx[i];
		      enum machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx p;

		      /* Scan the generated reload insns backwards, looking
			 for the single use of the reload reg that we can
			 rewrite as a true POST_INC / POST_DEC access.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      n = validate_replace_rtx (reload_reg,
							gen_rtx (code, mode,
								 reload_reg),
							p);

			      /* We must also verify that the constraints
				 are met after the replacement.  */
			      extract_insn (p);
			      if (n)
				n = constrain_operands (1);
			      else
				break;

			      /* If the constraints were not met, then
				 undo the replacement.  */
			      if (!n)
				{
				  validate_replace_rtx (gen_rtx (code, mode,
								 reload_reg),
							reload_reg, p);
				  break;
				}

			    }
			  break;
			}
		      if (n == 1)
			{
			  REG_NOTES (p)
			    = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
						 REG_NOTES (p));
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (reload_reg_rtx[i]))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && (reg_reloaded_contents[REGNO (reload_reg_rtx[i])]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (reload_reg_rtx[i]));
		      reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
	    && INSN_CLOBBERS_REGNO_P (insn, i))
	  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
#endif

#ifdef USE_C_ALLOCA
      /* With C alloca, free the temporary blocks allocated so far.  */
      alloca (0);
#endif
    }
}

/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.  */

static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored ATTRIBUTE_UNUSED;
{
  register int regno;
  int nr;
  int offset = 0;

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      int i;
      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
	/* But don't do this if the reg actually serves as an output
	   reload reg in the current instruction.  */
	if (n_reloads == 0
	    || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
	  {
	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
	    spill_reg_store[regno + i] = 0;
	  }
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}

/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.  */
static int reload_nregs[MAX_RELOADS];

/* Comparison function for qsort to decide which of two reloads
   should be handled first.  *P1 and *P2 are the reload numbers.  */

static int
reload_reg_class_lower (r1p, r2p)
     const GENERIC_PTR r1p;
     const GENERIC_PTR r2p;
{
  register int r1 = *(short *)r1p, r2 = *(short *)r2p;
  register int t;

  /* Consider required reloads before optional ones.  */
  t = reload_optional[r1] - reload_optional[r2];
  if (t != 0)
    return t;

  /* Count all solitary classes before non-solitary ones.  */
  t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
       - (reg_class_size[(int) reload_reg_class[r1]] == 1));
  if (t != 0)
    return t;

  /* Aside from solitaires, consider all multi-reg groups first.  */
  t = reload_nregs[r2] - reload_nregs[r1];
  if (t != 0)
    return t;

  /* Consider reloads in order of increasing reg-class number.  */
  t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
  if (t != 0)
    return t;

  /* If reloads are equally urgent, sort by reload number,
     so that the results of qsort leave nothing to chance.  */
  return r1 - r2;
}

/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is use as an inherited reload.  We just mark the first register
   in the group.
 */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;

/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
   TYPE.  MODE is used to indicate how many consecutive regs are
   actually used.  */

static void
mark_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int i;

  for (i = regno; i < nregs + regno; i++)
    {
      /* NOTE(review): unlike clear_reload_reg_in_use below, this switch
	 has no "default: abort ();" — an unexpected reload_type would be
	 silently ignored (except for the used_at_all bit set afterwards).
	 Confirm all enum reload_type values are covered.  */
      switch (type)
	{
	case RELOAD_OTHER:
	  SET_HARD_REG_BIT (reload_reg_used, i);
	  break;

	case RELOAD_FOR_INPUT_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
	  break;

	case RELOAD_FOR_INPADDR_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTADDR_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OPERAND_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
	  break;

	case RELOAD_FOR_OPADDR_ADDR:
	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
	  break;

	case RELOAD_FOR_OTHER_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
	  break;

	case RELOAD_FOR_INPUT:
	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT:
	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
	  break;

	case RELOAD_FOR_INSN:
	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
	  break;
	}

      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
    }
}

/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int start_regno, end_regno;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;

    default:
      abort ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more then what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (reload_when_needed[i] == type
	      && (check_any || reload_opnum[i] == opnum)
	      && reload_reg_rtx[i])
	    {
	      int conflict_start = true_regnum (reload_reg_rtx[i]);
	      int conflict_end
		= (conflict_start
		   + HARD_REGNO_NREGS (conflict_start, reload_mode[i]));

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }
  for (i = start_regno; i < end_regno; i++)
    CLEAR_HARD_REG_BIT (*used_in_set, i);
}

/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.
*/ 4867 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno) 4868 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno)) 4869 return 0; 4870 4871 for (i = 0; i < opnum; i++) 4872 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) 4873 return 0; 4874 4875 return 1; 4876 4877 case RELOAD_FOR_INPADDR_ADDRESS: 4878 /* Can't use a register if it is used for an input address 4879 for this operand or used as an input in an earlier 4880 one. */ 4881 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno)) 4882 return 0; 4883 4884 for (i = 0; i < opnum; i++) 4885 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) 4886 return 0; 4887 4888 return 1; 4889 4890 case RELOAD_FOR_OUTPUT_ADDRESS: 4891 /* Can't use a register if it is used for an output address for this 4892 operand or used as an output in this or a later operand. */ 4893 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)) 4894 return 0; 4895 4896 for (i = opnum; i < reload_n_operands; i++) 4897 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) 4898 return 0; 4899 4900 return 1; 4901 4902 case RELOAD_FOR_OUTADDR_ADDRESS: 4903 /* Can't use a register if it is used for an output address 4904 for this operand or used as an output in this or a 4905 later operand. */ 4906 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno)) 4907 return 0; 4908 4909 for (i = opnum; i < reload_n_operands; i++) 4910 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) 4911 return 0; 4912 4913 return 1; 4914 4915 case RELOAD_FOR_OPERAND_ADDRESS: 4916 for (i = 0; i < reload_n_operands; i++) 4917 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) 4918 return 0; 4919 4920 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) 4921 && ! 
TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)); 4922 4923 case RELOAD_FOR_OPADDR_ADDR: 4924 for (i = 0; i < reload_n_operands; i++) 4925 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) 4926 return 0; 4927 4928 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)); 4929 4930 case RELOAD_FOR_OUTPUT: 4931 /* This cannot share a register with RELOAD_FOR_INSN reloads, other 4932 outputs, or an operand address for this or an earlier output. */ 4933 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)) 4934 return 0; 4935 4936 for (i = 0; i < reload_n_operands; i++) 4937 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) 4938 return 0; 4939 4940 for (i = 0; i <= opnum; i++) 4941 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) 4942 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)) 4943 return 0; 4944 4945 return 1; 4946 4947 case RELOAD_FOR_INSN: 4948 for (i = 0; i < reload_n_operands; i++) 4949 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno) 4950 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) 4951 return 0; 4952 4953 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) 4954 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)); 4955 4956 case RELOAD_FOR_OTHER_ADDRESS: 4957 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno); 4958 } 4959 abort (); 4960} 4961 4962/* Return 1 if the value in reload reg REGNO, as used by a reload 4963 needed for the part of the insn specified by OPNUM and TYPE, 4964 is still available in REGNO at the end of the insn. 4965 4966 We can assume that the reload reg was already tested for availability 4967 at the time it is needed, and we should not check this again, 4968 in case the reg has already been marked in use. 
   */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;
    }

  abort ();
}

/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.  */

int
reloads_conflict (r1, r2)
     int r1, r2;
{
  enum reload_type r1_type = reload_when_needed[r1];
  enum reload_type r2_type = reload_when_needed[r2];
  int r1_opnum = reload_opnum[r1];
  int r2_opnum = reload_opnum[r2];

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum <= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      abort ();
    }
}

/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
int reload_spill_index[MAX_RELOADS];

/* Subroutine of free_for_value_p, used to check a single register.
   The parameters have the same meaning as for free_for_value_p,
   except that REGNO here names exactly one hard register.  */

static int
reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum,
			     ignore_address_reloads)
     int regno;
     int opnum;
     enum reload_type type;
     rtx value, out;
     int reloadnum;
     int ignore_address_reloads;
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  /* ??? reload_reg_used is abused to hold the registers that are not
     available as spill registers, including hard registers that are
     earlyclobbered in asms.  As a temporary measure, reject anything
     in reload_reg_used.  */
  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == CONST0_RTX is a flag meaning: test copying FROM regno into the
     reload register, rather than using regno AS the reload register.  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Examine every other reload whose reload register overlaps REGNO and
     compare lifetimes via the pseudo-time values.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = reload_reg_rtx[i];
      if (reg && GET_CODE (reg) == REG
	  && ((unsigned) regno - true_regnum (reg)
	      <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
	  && i != reloadnum)
	{
	  if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
	      || reload_out[i] || out)
	    {
	      int time2;
	      switch (reload_when_needed[i])
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! reload_out[reloadnum])
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == reload_opnum[i]
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! reload_out[reloadnum])
		    continue;
		  time2 = reload_opnum[i] * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == reload_opnum[i]
		      && ignore_address_reloads
		      && ! reload_out[reloadnum])
		    continue;
		  time2 = reload_opnum[i] * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = reload_opnum[i] * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* reload_opnum[i] * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4 */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! reload_out[reloadnum])
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! reload_out[reloadnum])
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + reload_opnum[i];
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + reload_opnum[i];
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (reload_out[i]))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;
		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      if ((time1 >= time2
		   && (! reload_in[i] || reload_out[i]
		       || ! rtx_equal_p (reload_in[i], value)))
		  || (out && reload_out_reg[reloadnum]
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}

/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   may be used to load VALUE into it.

   MODE is the mode in which the register is used, this is needed to
   determine how many hard regs to test.

   Other read-only reloads with the same value do not conflict
   unless OUT is non-zero and these other reloads have to live while
   output reloads live.
   If OUT is CONST0_RTX, this is a special case: it means that the
   test should not be for using register REGNO as reload register, but
   for copying from register REGNO into the reload register.

   RELOADNUM is the number of the reload we want to load this value for;
   a reload does not conflict with itself.

   When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
   reloads that load an address for the very reload we are considering.

   The caller has to make sure that there is no conflict with the return
   register.  */

static int
free_for_value_p (regno, mode, opnum, type, value, out, reloadnum,
		  ignore_address_reloads)
     int regno;
     enum machine_mode mode;
     int opnum;
     enum reload_type type;
     rtx value, out;
     int reloadnum;
     int ignore_address_reloads;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  /* Every hard register that (REGNO, MODE) occupies must individually
     pass the single-register check.  */
  while (nregs-- > 0)
    if (! reload_reg_free_for_value_p (regno + nregs, opnum, type, value, out,
				       reloadnum, ignore_address_reloads))
      return 0;
  return 1;
}

/* Determine whether the reload reg X overlaps any rtx'es used for
   overriding inheritance.  Return nonzero if so.  */

static int
conflicts_with_override (x)
     rtx x;
{
  int i;
  for (i = 0; i < n_reloads; i++)
    if (reload_override_in[i]
	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
      return 1;
  return 0;
}

/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.
   If NOERROR is zero, a failure is a fatal error for non-asm insns,
   and for asm insns the offending reload is disabled instead.  */

static int
allocate_reload_reg (chain, r, last_reload, noerror)
     struct insn_chain *chain;
     int r;
     int last_reload;
     int noerror;
{
  rtx insn = chain->insn;
  int i, pass, count, regno;
  rtx new;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];
	  int regnum;

	  /* Advance I round-robin through the spill registers.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  if ((reload_reg_free_p (regnum, reload_opnum[r],
				  reload_when_needed[r])
	       || (reload_in[r]
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, reload_mode[r],
					reload_opnum[r],
					reload_when_needed[r],
					reload_in[r], reload_out[r], r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
	      && HARD_REGNO_MODE_OK (regnum, reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = HARD_REGNO_NREGS (regnum, reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
		while (nr > 1)
		  {
		    regno = regnum + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      /* NR == 1 here means every register of the group checked
		 out; take this group.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx_REG (reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = spill_regs[i];
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}

/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.
   */

/* CHAIN describes the insn being processed together with its register-life
   sets (live_before / live_after) and the spill registers available to it
   (used_spill_regs).  Results are left in the global reload_* arrays:
   reload_reg_rtx[r] gets the chosen register (or 0 for an ignored optional
   reload), and reload_inherited / reload_override_in / reload_spill_index
   record how each choice was made.  */

static void
choose_reload_regs (chain)
     struct insn_chain *chain;
{
  rtx insn = chain->insn;
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;
  int pass;

  /* Shadow copies of the global allocation state, so a failed attempt to
     allocate with inheritance can be rolled back below.  */
  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Compute the set of hard regs live around this insn: hard regs in the
     live-in/live-out sets plus those occupied by live pseudos.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, chain->live_before);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, chain->live_after);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, chain->live_before);
    compute_use_by_pseudos (&reg_used_in_insn, chain->live_after);
  }
  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Everything not in this chain's spill-reg set is unavailable.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
    int tem = 0;
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
	tem++;
    if (tem > n_spills)
      must_reuse = 1;
  }
#endif

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      /* The reload's mode is the wider of its input and output modes.  */
      reload_mode[j]
	= (reload_inmode[j] == VOIDmode
	   || (GET_MODE_SIZE (reload_outmode[j])
	       > GET_MODE_SIZE (reload_inmode[j])))
	  ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class
	    = reg_class_superunion[(int) reload_reg_class[j]][(int) group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
				reload_when_needed[j], reload_mode[j]);
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Snapshot the allocation state so the inheritance attempt below can be
     undone if it fails to allocate every reload.  */
  bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
	 sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy ((char *) reload_inheritance_insn,
	 (char *) save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
	 sizeof reload_override_in);
  bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);

  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
		     reload_reg_used_in_op_addr_reload);

  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
		     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
		     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
			 reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
			 reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
			 reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
			 reload_reg_used_in_inpaddr_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
			 reload_reg_used_in_output_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
			 reload_reg_used_in_outaddr_addr[i]);
    }

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];
	  rtx search_equiv = NULL_RTX;

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0
	      && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to chose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.
	     Try also when reload_in is a pseudo without a hard reg.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || (rtx_equal_p (reload_out[r], reload_reg_rtx[r])
		      && GET_CODE (reload_in[r]) != MEM
		      && true_regnum (reload_in[r]) < FIRST_PSEUDO_REGISTER)))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (chain, reload_order[i], 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a
	     register be allocated here.  In `emit_reload_insns' we suppress
	     one of the loads in the case described above.  */

	  if (inheritance)
	    {
	      int word = 0;
	      register int regno = -1;
	      enum machine_mode mode;

	      /* Determine which (hard or pseudo) register holds the value
		 being reloaded, and in what mode.  */
	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		{
		  regno = REGNO (reload_in[r]);
		  mode = GET_MODE (reload_in[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		{
		  regno = REGNO (reload_in_reg[r]);
		  mode = GET_MODE (reload_in_reg[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in_reg[r])) == REG)
		{
		  word = SUBREG_WORD (reload_in_reg[r]);
		  regno = REGNO (SUBREG_REG (reload_in_reg[r]));
		  if (regno < FIRST_PSEUDO_REGISTER)
		    regno += word;
		  mode = GET_MODE (reload_in_reg[r]);
		}
#ifdef AUTO_INC_DEC
	      else if ((GET_CODE (reload_in_reg[r]) == PRE_INC
			|| GET_CODE (reload_in_reg[r]) == PRE_DEC
			|| GET_CODE (reload_in_reg[r]) == POST_INC
			|| GET_CODE (reload_in_reg[r]) == POST_DEC)
		       && GET_CODE (XEXP (reload_in_reg[r], 0)) == REG)
		{
		  regno = REGNO (XEXP (reload_in_reg[r], 0));
		  mode = GET_MODE (XEXP (reload_in_reg[r], 0));
		  reload_out[r] = reload_in[r];
		}
#endif
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif

	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  enum reg_class class = reload_reg_class[r], last_class;
		  rtx last_reg = reg_last_reload_reg[regno];

		  i = REGNO (last_reg) + word;
		  last_class = REGNO_REG_CLASS (i);
		  if ((GET_MODE_SIZE (GET_MODE (last_reg))
		       >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
		      && reg_reloaded_contents[i] == regno
		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
		      && HARD_REGNO_MODE_OK (i, reload_mode[r])
		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
			  /* Even if we can't use this register as a reload
			     register, we might use it for reload_override_in,
			     if copying it to the desired class is cheap
			     enough.  */
			  || ((REGISTER_MOVE_COST (last_class, class)
			       < MEMORY_MOVE_COST (mode, class, 1))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
			      && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
								last_reg)
				  == NO_REGS)
#endif
#ifdef SECONDARY_MEMORY_NEEDED
			      && ! SECONDARY_MEMORY_NEEDED (last_class, class,
							    mode)
#endif
			      ))

		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  i))
		      && free_for_value_p (i, reload_mode[r], reload_opnum[r],
					   reload_when_needed[r], reload_in[r],
					   const0_rtx, r, 1))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (i, reload_mode[r]);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[i + k] != regno
			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
			  break;

		      if (k == nr)
			{
			  int i1;

			  last_reg = (GET_MODE (last_reg) == mode
				      ? last_reg : gen_rtx_REG (mode, i));

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))
			      break;

			  if (i1 != n_earlyclobbers
			      || ! (free_for_value_p (i, reload_mode[r],
						      reload_opnum[r],
						      reload_when_needed[r],
						      reload_in[r],
						      reload_out[r], r, 1))
			      /* Don't use it if we'd clobber a pseudo reg.  */
			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
				  && reload_out[r]
				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
			      /* Don't clobber the frame pointer.  */
			      || (i == HARD_FRAME_POINTER_REGNUM
				  && reload_out[r])
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || (GET_MODE_SIZE (reload_mode[r])
				  > GET_MODE_SIZE (mode))
			      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
						      i)

			      /* If find_reloads chose reload_out as reload
				 register, stay with it - that leaves the
				 inherited register for subsequent reloads.  */
			      || (reload_out[r] && reload_reg_rtx[r]
				  && rtx_equal_p (reload_out[r],
						  reload_reg_rtx[r])))
			    {
			      if (! reload_optional[r])
				{
				  reload_override_in[r] = last_reg;
				  reload_inheritance_insn[r]
				    = reg_reloaded_insn[i];
				}
			    }
			  else
			    {
			      int k;
			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (i,
						      reload_opnum[r],
						      reload_when_needed[r],
						      reload_mode[r]);
			      reload_reg_rtx[r] = last_reg;
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
						  i + k);
			    }
			}
		    }
		}
	    }

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    search_equiv = reload_in[r];
	  /* If this is an output reload from a simple move insn, look
	     if an equivalence for the input is available.  */
	  else if (inheritance && reload_in[r] == 0 && reload_out[r] != 0)
	    {
	      rtx set = single_set (insn);

	      if (set
		  && rtx_equal_p (reload_out[r], SET_DEST (set))
		  && CONSTANT_P (SET_SRC (set)))
		search_equiv = SET_SRC (set);
	    }

	  if (search_equiv)
	    {
	      register rtx equiv
		= find_equiv_reg (search_equiv, insn, reload_reg_class[r],
				  -1, NULL_PTR, 0, reload_mode[r]);
	      int regno;

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
		      equiv = gen_rtx_REG (reload_mode[r], regno);
		    }
		  else
		    abort ();
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
		       && ! free_for_value_p (regno, reload_mode[r],
					      reload_opnum[r],
					      reload_when_needed[r],
					      reload_in[r], reload_out[r], r,
					      1))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      if (! reload_optional[r])
			reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* If the equiv register we have found is explicitly clobbered
		 in the current insn, it depends on the reload type if we
		 can use it, use it for reload_override_in, or not at all.
		 In particular, we then can't use EQUIV for a
		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn,
						   reload_mode[r], 0))
		{
		  switch (reload_when_needed[r])
		    {
		    case RELOAD_FOR_OTHER_ADDRESS:
		    case RELOAD_FOR_INPADDR_ADDRESS:
		    case RELOAD_FOR_INPUT_ADDRESS:
		    case RELOAD_FOR_OPADDR_ADDR:
		      break;
		    case RELOAD_OTHER:
		    case RELOAD_FOR_INPUT:
		    case RELOAD_FOR_OPERAND_ADDRESS:
		      if (! reload_optional[r])
			reload_override_in[r] = equiv;
		      /* Fall through.  */
		    default:
		      equiv = 0;
		      break;
		    }
		}
	      else if (regno_clobbered_p (regno, insn, reload_mode[r], 1))
		switch (reload_when_needed[r])
		  {
		  case RELOAD_FOR_OTHER_ADDRESS:
		  case RELOAD_FOR_INPADDR_ADDRESS:
		  case RELOAD_FOR_INPUT_ADDRESS:
		  case RELOAD_FOR_OPADDR_ADDR:
		  case RELOAD_FOR_OPERAND_ADDRESS:
		  case RELOAD_FOR_INPUT:
		    break;
		  case RELOAD_OTHER:
		    if (! reload_optional[r])
		      reload_override_in[r] = equiv;
		    /* Fall through.  */
		  default:
		    equiv = 0;
		    break;
		  }

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
		{
		  int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
		  int k;
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;

		  /* If reg_reloaded_valid is not set for this register,
		     there might be a stale spill_reg_store lying around.
		     We must clear it, since otherwise emit_reload_insns
		     might delete the store.  */
		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
		    spill_reg_store[regno] = NULL_RTX;
		  /* If any of the hard registers in EQUIV are spill
		     registers, mark them as in use for this insn.  */
		  for (k = 0; k < nr; k++)
		    {
		      i = spill_reg_order[regno + k];
		      if (i >= 0)
			{
			  /* NOTE(review): this passes REGNO on every
			     iteration while the bit set below uses
			     regno + k -- confirm that marking only the
			     first register is intended.  */
			  mark_reload_reg_in_use (regno, reload_opnum[r],
						  reload_when_needed[r],
						  reload_mode[r]);
			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
					    regno + k);
			}
		    }
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
#endif
	}

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;

      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
	     sizeof reload_reg_rtx);
      bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
	     sizeof reload_inherited);
      bcopy ((char *) save_reload_inheritance_insn,
	     (char *) reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
	     sizeof reload_override_in);
      bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
			 save_reload_reg_used_in_op_addr_reload);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
			 save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
			 save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
	{
	  COPY_HARD_REG_SET (reload_reg_used_in_input[i],
			     save_reload_reg_used_in_input[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output[i],
			     save_reload_reg_used_in_output[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
			     save_reload_reg_used_in_input_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
			     save_reload_reg_used_in_inpaddr_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
			     save_reload_reg_used_in_output_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
			     save_reload_reg_used_in_outaddr_addr[i]);
	}
    }

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.
     Likewise for reloads where reload_override_in has been set.  */

  /* If doing expensive optimizations, do one preliminary pass that doesn't
     cancel any inheritance, but removes reloads that have been needed only
     for reloads that we know can be inherited.  */
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
    {
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];
	  rtx check_reg;
	  if (reload_inherited[r] && reload_reg_rtx[r])
	    check_reg = reload_reg_rtx[r];
	  else if (reload_override_in[r]
		   && (GET_CODE (reload_override_in[r]) == REG
		       || GET_CODE (reload_override_in[r]) == SUBREG))
	    check_reg = reload_override_in[r];
	  else
	    continue;
	  if (! free_for_value_p (true_regnum (check_reg), reload_mode[r],
				  reload_opnum[r], reload_when_needed[r],
				  reload_in[r],
				  (reload_inherited[r]
				   ? reload_out[r] : const0_rtx),
				  r, 1))
	    {
	      if (pass)
		continue;
	      reload_inherited[r] = 0;
	      reload_override_in[r] = 0;
	    }
	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
	     reload_override_in, then we do not need its related
	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
	     likewise for other reload types.
	     We handle this by removing a reload when its only replacement
	     is mentioned in reload_in of the reload we are going to inherit.
	     A special case are auto_inc expressions; even if the input is
	     inherited, we still need the address for the output.  We can
	     recognize them because they have RELOAD_OUT set but not
	     RELOAD_OUT_REG.
	     If we suceeded removing some reload and we are doing a preliminary
	     pass just to remove such reloads, make another pass, since the
	     removal of one reload might allow us to inherit another one.  */
	  else if ((! reload_out[r] || reload_out_reg[r])
		   && reload_in[r]
		   && remove_address_replacements (reload_in[r]) && pass)
	    pass = 2;
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
	&& ((reload_optional[j] && ! reload_inherited[j])
	    || (reload_in[j] == 0 && reload_out[j] == 0
		&& ! reload_secondary_p[j])))
      {
	int regno = true_regnum (reload_reg_rtx[j]);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, reload_opnum[j],
				   reload_when_needed[j], reload_mode[j]);
	reload_reg_rtx[j] = 0;
	reload_spill_index[j] = -1;
      }

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload uses a register.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out_reg[r] != 0 && GET_CODE (reload_out_reg[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out_reg[r]);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    reg_has_output_reload[nregno + nr] = 1;

	  if (i >= 0)
	    {
	      nr = HARD_REGNO_NREGS (i, reload_mode[r]);
	      while (--nr >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
	    }

	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT
	      && reload_when_needed[r] != RELOAD_FOR_INSN)
	    abort ();
	}
    }
}

/* Deallocate the reload register for reload R.  This is called from
   remove_address_replacements.  */
void
deallocate_reload_reg (r)
     int r;
{
  int regno;

  if (! reload_reg_rtx[r])
    return;
  regno = true_regnum (reload_reg_rtx[r]);
  reload_reg_rtx[r] = 0;
  if (spill_reg_order[regno] >= 0)
    clear_reload_reg_in_use (regno, reload_opnum[r], reload_when_needed[r],
			     reload_mode[r]);
  reload_spill_index[r] = -1;
}

/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
   reloads of the same item for fear that we might not have enough reload
   registers.  However, normally they will get the same reload register
   and hence actually need not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.
*/ 6506 6507static void 6508merge_assigned_reloads (insn) 6509 rtx insn; 6510{ 6511 int i, j; 6512 6513 /* Scan all the reloads looking for ones that only load values and 6514 are not already RELOAD_OTHER and ones whose reload_reg_rtx are 6515 assigned and not modified by INSN. */ 6516 6517 for (i = 0; i < n_reloads; i++) 6518 { 6519 int conflicting_input = 0; 6520 int max_input_address_opnum = -1; 6521 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS; 6522 6523 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER 6524 || reload_out[i] != 0 || reload_reg_rtx[i] == 0 6525 || reg_set_p (reload_reg_rtx[i], insn)) 6526 continue; 6527 6528 /* Look at all other reloads. Ensure that the only use of this 6529 reload_reg_rtx is in a reload that just loads the same value 6530 as we do. Note that any secondary reloads must be of the identical 6531 class since the values, modes, and result registers are the 6532 same, so we need not do anything with any secondary reloads. */ 6533 6534 for (j = 0; j < n_reloads; j++) 6535 { 6536 if (i == j || reload_reg_rtx[j] == 0 6537 || ! reg_overlap_mentioned_p (reload_reg_rtx[j], 6538 reload_reg_rtx[i])) 6539 continue; 6540 6541 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS 6542 && reload_opnum[j] > max_input_address_opnum) 6543 max_input_address_opnum = reload_opnum[j]; 6544 6545 /* If the reload regs aren't exactly the same (e.g, different modes) 6546 or if the values are different, we can't merge this reload. 6547 But if it is an input reload, we might still merge 6548 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */ 6549 6550 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]) 6551 || reload_out[j] != 0 || reload_in[j] == 0 6552 || ! 
rtx_equal_p (reload_in[i], reload_in[j])) 6553 { 6554 if (reload_when_needed[j] != RELOAD_FOR_INPUT 6555 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS 6556 || reload_opnum[i] > reload_opnum[j]) 6557 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS)) 6558 break; 6559 conflicting_input = 1; 6560 if (min_conflicting_input_opnum > reload_opnum[j]) 6561 min_conflicting_input_opnum = reload_opnum[j]; 6562 } 6563 } 6564 6565 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if 6566 we, in fact, found any matching reloads. */ 6567 6568 if (j == n_reloads 6569 && max_input_address_opnum <= min_conflicting_input_opnum) 6570 { 6571 for (j = 0; j < n_reloads; j++) 6572 if (i != j && reload_reg_rtx[j] != 0 6573 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]) 6574 && (! conflicting_input 6575 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS 6576 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS)) 6577 { 6578 reload_when_needed[i] = RELOAD_OTHER; 6579 reload_in[j] = 0; 6580 reload_spill_index[j] = -1; 6581 transfer_replacements (i, j); 6582 } 6583 6584 /* If this is now RELOAD_OTHER, look for any reloads that load 6585 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS 6586 if they were for inputs, RELOAD_OTHER for outputs. Note that 6587 this test is equivalent to looking for reloads for this operand 6588 number. */ 6589 6590 if (reload_when_needed[i] == RELOAD_OTHER) 6591 for (j = 0; j < n_reloads; j++) 6592 if (reload_in[j] != 0 6593 && reload_when_needed[i] != RELOAD_OTHER 6594 && reg_overlap_mentioned_for_reload_p (reload_in[j], 6595 reload_in[i])) 6596 reload_when_needed[j] 6597 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS 6598 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS) 6599 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER); 6600 } 6601 } 6602} 6603 6604 6605/* Output insns to reload values in and out of the chosen reload regs. 
*/ 6606 6607static void 6608emit_reload_insns (chain) 6609 struct insn_chain *chain; 6610{ 6611 rtx insn = chain->insn; 6612 6613 register int j; 6614 rtx input_reload_insns[MAX_RECOG_OPERANDS]; 6615 rtx other_input_address_reload_insns = 0; 6616 rtx other_input_reload_insns = 0; 6617 rtx input_address_reload_insns[MAX_RECOG_OPERANDS]; 6618 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS]; 6619 rtx output_reload_insns[MAX_RECOG_OPERANDS]; 6620 rtx output_address_reload_insns[MAX_RECOG_OPERANDS]; 6621 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS]; 6622 rtx operand_reload_insns = 0; 6623 rtx other_operand_reload_insns = 0; 6624 rtx other_output_reload_insns[MAX_RECOG_OPERANDS]; 6625 rtx following_insn = NEXT_INSN (insn); 6626 rtx before_insn = PREV_INSN (insn); 6627 int special; 6628 /* Values to be put in spill_reg_store are put here first. */ 6629 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER]; 6630 HARD_REG_SET reg_reloaded_died; 6631 6632 CLEAR_HARD_REG_SET (reg_reloaded_died); 6633 6634 for (j = 0; j < reload_n_operands; j++) 6635 input_reload_insns[j] = input_address_reload_insns[j] 6636 = inpaddr_address_reload_insns[j] 6637 = output_reload_insns[j] = output_address_reload_insns[j] 6638 = outaddr_address_reload_insns[j] 6639 = other_output_reload_insns[j] = 0; 6640 6641 /* Now output the instructions to copy the data into and out of the 6642 reload registers. Do these in the order that the reloads were reported, 6643 since reloads of base and index registers precede reloads of operands 6644 and the operands may need the base and index registers reloaded. */ 6645 6646 for (j = 0; j < n_reloads; j++) 6647 { 6648 register rtx old; 6649 rtx oldequiv_reg = 0; 6650 rtx this_reload_insn = 0; 6651 int expect_occurrences = 1; 6652 6653 if (reload_reg_rtx[j] 6654 && REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER) 6655 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = 0; 6656 6657 old = (reload_in[j] && GET_CODE (reload_in[j]) == MEM 6658 ? 
reload_in_reg[j] : reload_in[j]); 6659 6660 if (old != 0 6661 /* AUTO_INC reloads need to be handled even if inherited. We got an 6662 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */ 6663 && (! reload_inherited[j] || (reload_out[j] && ! reload_out_reg[j])) 6664 && ! rtx_equal_p (reload_reg_rtx[j], old) 6665 && reload_reg_rtx[j] != 0) 6666 { 6667 register rtx reloadreg = reload_reg_rtx[j]; 6668 rtx oldequiv = 0; 6669 enum machine_mode mode; 6670 rtx *where; 6671 6672 /* Determine the mode to reload in. 6673 This is very tricky because we have three to choose from. 6674 There is the mode the insn operand wants (reload_inmode[J]). 6675 There is the mode of the reload register RELOADREG. 6676 There is the intrinsic mode of the operand, which we could find 6677 by stripping some SUBREGs. 6678 It turns out that RELOADREG's mode is irrelevant: 6679 we can change that arbitrarily. 6680 6681 Consider (SUBREG:SI foo:QI) as an operand that must be SImode; 6682 then the reload reg may not support QImode moves, so use SImode. 6683 If foo is in memory due to spilling a pseudo reg, this is safe, 6684 because the QImode value is in the least significant part of a 6685 slot big enough for a SImode. If foo is some other sort of 6686 memory reference, then it is impossible to reload this case, 6687 so previous passes had better make sure this never happens. 6688 6689 Then consider a one-word union which has SImode and one of its 6690 members is a float, being fetched as (SUBREG:SF union:SI). 6691 We must fetch that as SFmode because we could be loading into 6692 a float-only register. In this case OLD's mode is correct. 6693 6694 Consider an immediate integer: it has VOIDmode. Here we need 6695 to get a mode from something else. 6696 6697 In some cases, there is a fourth mode, the operand's 6698 containing mode. If the insn specifies a containing mode for 6699 this operand, it overrides all others. 
6700 6701 I am not sure whether the algorithm here is always right, 6702 but it does the right things in those cases. */ 6703 6704 mode = GET_MODE (old); 6705 if (mode == VOIDmode) 6706 mode = reload_inmode[j]; 6707 6708#ifdef SECONDARY_INPUT_RELOAD_CLASS 6709 /* If we need a secondary register for this operation, see if 6710 the value is already in a register in that class. Don't 6711 do this if the secondary register will be used as a scratch 6712 register. */ 6713 6714 if (reload_secondary_in_reload[j] >= 0 6715 && reload_secondary_in_icode[j] == CODE_FOR_nothing 6716 && optimize) 6717 oldequiv 6718 = find_equiv_reg (old, insn, 6719 reload_reg_class[reload_secondary_in_reload[j]], 6720 -1, NULL_PTR, 0, mode); 6721#endif 6722 6723 /* If reloading from memory, see if there is a register 6724 that already holds the same value. If so, reload from there. 6725 We can pass 0 as the reload_reg_p argument because 6726 any other reload has either already been emitted, 6727 in which case find_equiv_reg will see the reload-insn, 6728 or has yet to be emitted, in which case it doesn't matter 6729 because we will use this equiv reg right away. */ 6730 6731 if (oldequiv == 0 && optimize 6732 && (GET_CODE (old) == MEM 6733 || (GET_CODE (old) == REG 6734 && REGNO (old) >= FIRST_PSEUDO_REGISTER 6735 && reg_renumber[REGNO (old)] < 0))) 6736 oldequiv = find_equiv_reg (old, insn, ALL_REGS, 6737 -1, NULL_PTR, 0, mode); 6738 6739 if (oldequiv) 6740 { 6741 int regno = true_regnum (oldequiv); 6742 6743 /* Don't use OLDEQUIV if any other reload changes it at an 6744 earlier stage of this insn or at this stage. */ 6745 if (! free_for_value_p (regno, reload_mode[j], 6746 reload_opnum[j], 6747 reload_when_needed[j], 6748 reload_in[j], const0_rtx, j, 0)) 6749 oldequiv = 0; 6750 6751 /* If it is no cheaper to copy from OLDEQUIV into the 6752 reload register than it would be to move from memory, 6753 don't use it. Likewise, if we need a secondary register 6754 or memory. 
*/ 6755 6756 if (oldequiv != 0 6757 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j] 6758 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno), 6759 reload_reg_class[j]) 6760 >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1))) 6761#ifdef SECONDARY_INPUT_RELOAD_CLASS 6762 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j], 6763 mode, oldequiv) 6764 != NO_REGS) 6765#endif 6766#ifdef SECONDARY_MEMORY_NEEDED 6767 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno), 6768 reload_reg_class[j], 6769 mode) 6770#endif 6771 )) 6772 oldequiv = 0; 6773 } 6774 6775 /* delete_output_reload is only invoked properly if old contains 6776 the original pseudo register. Since this is replaced with a 6777 hard reg when RELOAD_OVERRIDE_IN is set, see if we can 6778 find the pseudo in RELOAD_IN_REG. */ 6779 if (oldequiv == 0 6780 && reload_override_in[j] 6781 && GET_CODE (reload_in_reg[j]) == REG) 6782 { 6783 oldequiv = old; 6784 old = reload_in_reg[j]; 6785 } 6786 if (oldequiv == 0) 6787 oldequiv = old; 6788 else if (GET_CODE (oldequiv) == REG) 6789 oldequiv_reg = oldequiv; 6790 else if (GET_CODE (oldequiv) == SUBREG) 6791 oldequiv_reg = SUBREG_REG (oldequiv); 6792 6793 /* If we are reloading from a register that was recently stored in 6794 with an output-reload, see if we can prove there was 6795 actually no need to store the old value in it. */ 6796 6797 if (optimize && GET_CODE (oldequiv) == REG 6798 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER 6799 && spill_reg_store[REGNO (oldequiv)] 6800 && GET_CODE (old) == REG 6801 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)]) 6802 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)], 6803 reload_out_reg[j]))) 6804 delete_output_reload (insn, j, REGNO (oldequiv)); 6805 6806 /* Encapsulate both RELOADREG and OLDEQUIV into that mode, 6807 then load RELOADREG from OLDEQUIV. Note that we cannot use 6808 gen_lowpart_common since it can do the wrong thing when 6809 RELOADREG has a multi-word mode. 
Note that RELOADREG 6810 must always be a REG here. */ 6811 6812 if (GET_MODE (reloadreg) != mode) 6813 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg)); 6814 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode) 6815 oldequiv = SUBREG_REG (oldequiv); 6816 if (GET_MODE (oldequiv) != VOIDmode 6817 && mode != GET_MODE (oldequiv)) 6818 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0); 6819 6820 /* Switch to the right place to emit the reload insns. */ 6821 switch (reload_when_needed[j]) 6822 { 6823 case RELOAD_OTHER: 6824 where = &other_input_reload_insns; 6825 break; 6826 case RELOAD_FOR_INPUT: 6827 where = &input_reload_insns[reload_opnum[j]]; 6828 break; 6829 case RELOAD_FOR_INPUT_ADDRESS: 6830 where = &input_address_reload_insns[reload_opnum[j]]; 6831 break; 6832 case RELOAD_FOR_INPADDR_ADDRESS: 6833 where = &inpaddr_address_reload_insns[reload_opnum[j]]; 6834 break; 6835 case RELOAD_FOR_OUTPUT_ADDRESS: 6836 where = &output_address_reload_insns[reload_opnum[j]]; 6837 break; 6838 case RELOAD_FOR_OUTADDR_ADDRESS: 6839 where = &outaddr_address_reload_insns[reload_opnum[j]]; 6840 break; 6841 case RELOAD_FOR_OPERAND_ADDRESS: 6842 where = &operand_reload_insns; 6843 break; 6844 case RELOAD_FOR_OPADDR_ADDR: 6845 where = &other_operand_reload_insns; 6846 break; 6847 case RELOAD_FOR_OTHER_ADDRESS: 6848 where = &other_input_address_reload_insns; 6849 break; 6850 default: 6851 abort (); 6852 } 6853 6854 push_to_sequence (*where); 6855 special = 0; 6856 6857 /* Auto-increment addresses must be reloaded in a special way. */ 6858 if (reload_out[j] && ! reload_out_reg[j]) 6859 { 6860 /* We are not going to bother supporting the case where a 6861 incremented register can't be copied directly from 6862 OLDEQUIV since this seems highly unlikely. 
*/ 6863 if (reload_secondary_in_reload[j] >= 0) 6864 abort (); 6865 6866 if (reload_inherited[j]) 6867 oldequiv = reloadreg; 6868 6869 old = XEXP (reload_in_reg[j], 0); 6870 6871 if (optimize && GET_CODE (oldequiv) == REG 6872 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER 6873 && spill_reg_store[REGNO (oldequiv)] 6874 && GET_CODE (old) == REG 6875 && (dead_or_set_p (insn, 6876 spill_reg_stored_to[REGNO (oldequiv)]) 6877 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)], 6878 old))) 6879 delete_output_reload (insn, j, REGNO (oldequiv)); 6880 6881 /* Prevent normal processing of this reload. */ 6882 special = 1; 6883 /* Output a special code sequence for this case. */ 6884 new_spill_reg_store[REGNO (reloadreg)] 6885 = inc_for_reload (reloadreg, oldequiv, reload_out[j], 6886 reload_inc[j]); 6887 } 6888 6889 /* If we are reloading a pseudo-register that was set by the previous 6890 insn, see if we can get rid of that pseudo-register entirely 6891 by redirecting the previous insn into our reload register. */ 6892 6893 else if (optimize && GET_CODE (old) == REG 6894 && REGNO (old) >= FIRST_PSEUDO_REGISTER 6895 && dead_or_set_p (insn, old) 6896 /* This is unsafe if some other reload 6897 uses the same reg first. */ 6898 && ! conflicts_with_override (reloadreg) 6899 && free_for_value_p (REGNO (reloadreg), reload_mode[j], 6900 reload_opnum[j], reload_when_needed[j], 6901 old, reload_out[j], j, 0)) 6902 { 6903 rtx temp = PREV_INSN (insn); 6904 while (temp && GET_CODE (temp) == NOTE) 6905 temp = PREV_INSN (temp); 6906 if (temp 6907 && GET_CODE (temp) == INSN 6908 && GET_CODE (PATTERN (temp)) == SET 6909 && SET_DEST (PATTERN (temp)) == old 6910 /* Make sure we can access insn_operand_constraint. */ 6911 && asm_noperands (PATTERN (temp)) < 0 6912 /* This is unsafe if prev insn rejects our reload reg. 
*/ 6913 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0], 6914 reloadreg) 6915 /* This is unsafe if operand occurs more than once in current 6916 insn. Perhaps some occurrences aren't reloaded. */ 6917 && count_occurrences (PATTERN (insn), old) == 1 6918 /* Don't risk splitting a matching pair of operands. */ 6919 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp)))) 6920 { 6921 /* Store into the reload register instead of the pseudo. */ 6922 SET_DEST (PATTERN (temp)) = reloadreg; 6923 6924 /* If the previous insn is an output reload, the source is 6925 a reload register, and its spill_reg_store entry will 6926 contain the previous destination. This is now 6927 invalid. */ 6928 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG 6929 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER) 6930 { 6931 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0; 6932 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0; 6933 } 6934 6935 /* If these are the only uses of the pseudo reg, 6936 pretend for GDB it lives in the reload reg we used. */ 6937 if (REG_N_DEATHS (REGNO (old)) == 1 6938 && REG_N_SETS (REGNO (old)) == 1) 6939 { 6940 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]); 6941 alter_reg (REGNO (old), -1); 6942 } 6943 special = 1; 6944 } 6945 } 6946 6947 /* We can't do that, so output an insn to load RELOADREG. */ 6948 6949 if (! special) 6950 { 6951#ifdef SECONDARY_INPUT_RELOAD_CLASS 6952 rtx second_reload_reg = 0; 6953 enum insn_code icode; 6954 6955 /* If we have a secondary reload, pick up the secondary register 6956 and icode, if any. If OLDEQUIV and OLD are different or 6957 if this is an in-out reload, recompute whether or not we 6958 still need a secondary register and what the icode should 6959 be. If we still need a secondary register and the class or 6960 icode is different, go back to reloading from OLD if using 6961 OLDEQUIV means that we got the wrong type of register. 
We 6962 cannot have different class or icode due to an in-out reload 6963 because we don't make such reloads when both the input and 6964 output need secondary reload registers. */ 6965 6966 if (reload_secondary_in_reload[j] >= 0) 6967 { 6968 int secondary_reload = reload_secondary_in_reload[j]; 6969 rtx real_oldequiv = oldequiv; 6970 rtx real_old = old; 6971 rtx tmp; 6972 6973 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM 6974 and similarly for OLD. 6975 See comments in get_secondary_reload in reload.c. */ 6976 /* If it is a pseudo that cannot be replaced with its 6977 equivalent MEM, we must fall back to reload_in, which 6978 will have all the necessary substitutions registered. 6979 Likewise for a pseudo that can't be replaced with its 6980 equivalent constant. 6981 6982 Take extra care for subregs of such pseudos. Note that 6983 we cannot use reg_equiv_mem in this case because it is 6984 not in the right mode. */ 6985 6986 tmp = oldequiv; 6987 if (GET_CODE (tmp) == SUBREG) 6988 tmp = SUBREG_REG (tmp); 6989 if (GET_CODE (tmp) == REG 6990 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER 6991 && (reg_equiv_memory_loc[REGNO (tmp)] != 0 6992 || reg_equiv_constant[REGNO (tmp)] != 0)) 6993 { 6994 if (! reg_equiv_mem[REGNO (tmp)] 6995 || num_not_at_initial_offset 6996 || GET_CODE (oldequiv) == SUBREG) 6997 real_oldequiv = reload_in[j]; 6998 else 6999 real_oldequiv = reg_equiv_mem[REGNO (tmp)]; 7000 } 7001 7002 tmp = old; 7003 if (GET_CODE (tmp) == SUBREG) 7004 tmp = SUBREG_REG (tmp); 7005 if (GET_CODE (tmp) == REG 7006 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER 7007 && (reg_equiv_memory_loc[REGNO (tmp)] != 0 7008 || reg_equiv_constant[REGNO (tmp)] != 0)) 7009 { 7010 if (! 
reg_equiv_mem[REGNO (tmp)] 7011 || num_not_at_initial_offset 7012 || GET_CODE (old) == SUBREG) 7013 real_old = reload_in[j]; 7014 else 7015 real_old = reg_equiv_mem[REGNO (tmp)]; 7016 } 7017 7018 second_reload_reg = reload_reg_rtx[secondary_reload]; 7019 icode = reload_secondary_in_icode[j]; 7020 7021 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv)) 7022 || (reload_in[j] != 0 && reload_out[j] != 0)) 7023 { 7024 enum reg_class new_class 7025 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j], 7026 mode, real_oldequiv); 7027 7028 if (new_class == NO_REGS) 7029 second_reload_reg = 0; 7030 else 7031 { 7032 enum insn_code new_icode; 7033 enum machine_mode new_mode; 7034 7035 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], 7036 REGNO (second_reload_reg))) 7037 oldequiv = old, real_oldequiv = real_old; 7038 else 7039 { 7040 new_icode = reload_in_optab[(int) mode]; 7041 if (new_icode != CODE_FOR_nothing 7042 && ((insn_operand_predicate[(int) new_icode][0] 7043 && ! ((*insn_operand_predicate[(int) new_icode][0]) 7044 (reloadreg, mode))) 7045 || (insn_operand_predicate[(int) new_icode][1] 7046 && ! ((*insn_operand_predicate[(int) new_icode][1]) 7047 (real_oldequiv, mode))))) 7048 new_icode = CODE_FOR_nothing; 7049 7050 if (new_icode == CODE_FOR_nothing) 7051 new_mode = mode; 7052 else 7053 new_mode = insn_operand_mode[(int) new_icode][2]; 7054 7055 if (GET_MODE (second_reload_reg) != new_mode) 7056 { 7057 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg), 7058 new_mode)) 7059 oldequiv = old, real_oldequiv = real_old; 7060 else 7061 second_reload_reg 7062 = gen_rtx_REG (new_mode, 7063 REGNO (second_reload_reg)); 7064 } 7065 } 7066 } 7067 } 7068 7069 /* If we still need a secondary reload register, check 7070 to see if it is being used as a scratch or intermediate 7071 register and generate code appropriately. If we need 7072 a scratch register, use REAL_OLDEQUIV since the form of 7073 the insn may depend on the actual address if it is 7074 a MEM. 
*/ 7075 7076 if (second_reload_reg) 7077 { 7078 if (icode != CODE_FOR_nothing) 7079 { 7080 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv, 7081 second_reload_reg)); 7082 special = 1; 7083 } 7084 else 7085 { 7086 /* See if we need a scratch register to load the 7087 intermediate register (a tertiary reload). */ 7088 enum insn_code tertiary_icode 7089 = reload_secondary_in_icode[secondary_reload]; 7090 7091 if (tertiary_icode != CODE_FOR_nothing) 7092 { 7093 rtx third_reload_reg 7094 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]]; 7095 7096 emit_insn ((GEN_FCN (tertiary_icode) 7097 (second_reload_reg, real_oldequiv, 7098 third_reload_reg))); 7099 } 7100 else 7101 gen_reload (second_reload_reg, real_oldequiv, 7102 reload_opnum[j], 7103 reload_when_needed[j]); 7104 7105 oldequiv = second_reload_reg; 7106 } 7107 } 7108 } 7109#endif 7110 7111 if (! special && ! rtx_equal_p (reloadreg, oldequiv)) 7112 { 7113 rtx real_oldequiv = oldequiv; 7114 7115 if ((GET_CODE (oldequiv) == REG 7116 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER 7117 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0 7118 || reg_equiv_constant[REGNO (oldequiv)] != 0)) 7119 || (GET_CODE (oldequiv) == SUBREG 7120 && GET_CODE (SUBREG_REG (oldequiv)) == REG 7121 && (REGNO (SUBREG_REG (oldequiv)) 7122 >= FIRST_PSEUDO_REGISTER) 7123 && ((reg_equiv_memory_loc 7124 [REGNO (SUBREG_REG (oldequiv))] != 0) 7125 || (reg_equiv_constant 7126 [REGNO (SUBREG_REG (oldequiv))] != 0)))) 7127 real_oldequiv = reload_in[j]; 7128 gen_reload (reloadreg, real_oldequiv, reload_opnum[j], 7129 reload_when_needed[j]); 7130 } 7131 7132 } 7133 7134 this_reload_insn = get_last_insn (); 7135 /* End this sequence. */ 7136 *where = get_insns (); 7137 end_sequence (); 7138 7139 /* Update reload_override_in so that delete_address_reloads_1 7140 can see the actual register usage. 
*/ 7141 if (oldequiv_reg) 7142 reload_override_in[j] = oldequiv; 7143 } 7144 7145 /* When inheriting a wider reload, we have a MEM in reload_in[j], 7146 e.g. inheriting a SImode output reload for 7147 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */ 7148 if (optimize && reload_inherited[j] && reload_in[j] 7149 && GET_CODE (reload_in[j]) == MEM 7150 && GET_CODE (reload_in_reg[j]) == MEM 7151 && reload_spill_index[j] >= 0 7152 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j])) 7153 { 7154 expect_occurrences 7155 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1; 7156 reload_in[j] 7157 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]]; 7158 } 7159 7160 /* If we are reloading a register that was recently stored in with an 7161 output-reload, see if we can prove there was 7162 actually no need to store the old value in it. */ 7163 7164 if (optimize 7165 && (reload_inherited[j] || reload_override_in[j]) 7166 && reload_reg_rtx[j] 7167 && GET_CODE (reload_reg_rtx[j]) == REG 7168 && spill_reg_store[REGNO (reload_reg_rtx[j])] != 0 7169#if 0 7170 /* There doesn't seem to be any reason to restrict this to pseudos 7171 and doing so loses in the case where we are copying from a 7172 register of the wrong class. */ 7173 && REGNO (spill_reg_stored_to[REGNO (reload_reg_rtx[j])]) 7174 >= FIRST_PSEUDO_REGISTER 7175#endif 7176 /* The insn might have already some references to stackslots 7177 replaced by MEMs, while reload_out_reg still names the 7178 original pseudo. */ 7179 && (dead_or_set_p (insn, 7180 spill_reg_stored_to[REGNO (reload_reg_rtx[j])]) 7181 || rtx_equal_p (spill_reg_stored_to[REGNO (reload_reg_rtx[j])], 7182 reload_out_reg[j]))) 7183 delete_output_reload (insn, j, REGNO (reload_reg_rtx[j])); 7184 7185 /* Input-reloading is done. Now do output-reloading, 7186 storing the value from the reload-register after the main insn 7187 if reload_out[j] is nonzero. 7188 7189 ??? 
At some point we need to support handling output reloads of 7190 JUMP_INSNs or insns that set cc0. */ 7191 7192 /* If this is an output reload that stores something that is 7193 not loaded in this same reload, see if we can eliminate a previous 7194 store. */ 7195 { 7196 rtx pseudo = reload_out_reg[j]; 7197 7198 if (pseudo 7199 && GET_CODE (pseudo) == REG 7200 && ! rtx_equal_p (reload_in_reg[j], pseudo) 7201 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER 7202 && reg_last_reload_reg[REGNO (pseudo)]) 7203 { 7204 int pseudo_no = REGNO (pseudo); 7205 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]); 7206 7207 /* We don't need to test full validity of last_regno for 7208 inherit here; we only want to know if the store actually 7209 matches the pseudo. */ 7210 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno) 7211 && reg_reloaded_contents[last_regno] == pseudo_no 7212 && spill_reg_store[last_regno] 7213 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno])) 7214 delete_output_reload (insn, j, last_regno); 7215 } 7216 } 7217 7218 old = reload_out_reg[j]; 7219 if (old != 0 7220 && reload_reg_rtx[j] != old 7221 && reload_reg_rtx[j] != 0) 7222 { 7223 register rtx reloadreg = reload_reg_rtx[j]; 7224#ifdef SECONDARY_OUTPUT_RELOAD_CLASS 7225 register rtx second_reloadreg = 0; 7226#endif 7227 rtx note, p; 7228 enum machine_mode mode; 7229 int special = 0; 7230 7231 /* An output operand that dies right away does need a reload, 7232 but need not be copied from it. Show the new location in the 7233 REG_UNUSED note. */ 7234 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH) 7235 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0) 7236 { 7237 XEXP (note, 0) = reload_reg_rtx[j]; 7238 continue; 7239 } 7240 /* Likewise for a SUBREG of an operand that dies. 
*/ 7241 else if (GET_CODE (old) == SUBREG 7242 && GET_CODE (SUBREG_REG (old)) == REG 7243 && 0 != (note = find_reg_note (insn, REG_UNUSED, 7244 SUBREG_REG (old)))) 7245 { 7246 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), 7247 reload_reg_rtx[j]); 7248 continue; 7249 } 7250 else if (GET_CODE (old) == SCRATCH) 7251 /* If we aren't optimizing, there won't be a REG_UNUSED note, 7252 but we don't want to make an output reload. */ 7253 continue; 7254 7255#if 0 7256 /* Strip off of OLD any size-increasing SUBREGs such as 7257 (SUBREG:SI foo:QI 0). */ 7258 7259 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0 7260 && (GET_MODE_SIZE (GET_MODE (old)) 7261 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old))))) 7262 old = SUBREG_REG (old); 7263#endif 7264 7265 /* If is a JUMP_INSN, we can't support output reloads yet. */ 7266 if (GET_CODE (insn) == JUMP_INSN) 7267 abort (); 7268 7269 if (reload_when_needed[j] == RELOAD_OTHER) 7270 start_sequence (); 7271 else 7272 push_to_sequence (output_reload_insns[reload_opnum[j]]); 7273 7274 old = reload_out[j]; 7275 7276 /* Determine the mode to reload in. 7277 See comments above (for input reloading). */ 7278 7279 mode = GET_MODE (old); 7280 if (mode == VOIDmode) 7281 { 7282 /* VOIDmode should never happen for an output. */ 7283 if (asm_noperands (PATTERN (insn)) < 0) 7284 /* It's the compiler's fault. */ 7285 fatal_insn ("VOIDmode on an output", insn); 7286 error_for_asm (insn, "output operand is constant in `asm'"); 7287 /* Prevent crash--use something we know is valid. */ 7288 mode = word_mode; 7289 old = gen_rtx_REG (mode, REGNO (reloadreg)); 7290 } 7291 7292 if (GET_MODE (reloadreg) != mode) 7293 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg)); 7294 7295#ifdef SECONDARY_OUTPUT_RELOAD_CLASS 7296 7297 /* If we need two reload regs, set RELOADREG to the intermediate 7298 one, since it will be stored into OLD. We might need a secondary 7299 register only for an input reload, so check again here. 
*/ 7300 7301 if (reload_secondary_out_reload[j] >= 0) 7302 { 7303 rtx real_old = old; 7304 7305 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER 7306 && reg_equiv_mem[REGNO (old)] != 0) 7307 real_old = reg_equiv_mem[REGNO (old)]; 7308 7309 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j], 7310 mode, real_old) 7311 != NO_REGS)) 7312 { 7313 second_reloadreg = reloadreg; 7314 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]]; 7315 7316 /* See if RELOADREG is to be used as a scratch register 7317 or as an intermediate register. */ 7318 if (reload_secondary_out_icode[j] != CODE_FOR_nothing) 7319 { 7320 emit_insn ((GEN_FCN (reload_secondary_out_icode[j]) 7321 (real_old, second_reloadreg, reloadreg))); 7322 special = 1; 7323 } 7324 else 7325 { 7326 /* See if we need both a scratch and intermediate reload 7327 register. */ 7328 7329 int secondary_reload = reload_secondary_out_reload[j]; 7330 enum insn_code tertiary_icode 7331 = reload_secondary_out_icode[secondary_reload]; 7332 7333 if (GET_MODE (reloadreg) != mode) 7334 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg)); 7335 7336 if (tertiary_icode != CODE_FOR_nothing) 7337 { 7338 rtx third_reloadreg 7339 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]]; 7340 rtx tem; 7341 7342 /* Copy primary reload reg to secondary reload reg. 7343 (Note that these have been swapped above, then 7344 secondary reload reg to OLD using our insn. */ 7345 7346 /* If REAL_OLD is a paradoxical SUBREG, remove it 7347 and try to put the opposite SUBREG on 7348 RELOADREG. 
*/ 7349 if (GET_CODE (real_old) == SUBREG 7350 && (GET_MODE_SIZE (GET_MODE (real_old)) 7351 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old)))) 7352 && 0 != (tem = gen_lowpart_common 7353 (GET_MODE (SUBREG_REG (real_old)), 7354 reloadreg))) 7355 real_old = SUBREG_REG (real_old), reloadreg = tem; 7356 7357 gen_reload (reloadreg, second_reloadreg, 7358 reload_opnum[j], reload_when_needed[j]); 7359 emit_insn ((GEN_FCN (tertiary_icode) 7360 (real_old, reloadreg, third_reloadreg))); 7361 special = 1; 7362 } 7363 7364 else 7365 /* Copy between the reload regs here and then to 7366 OUT later. */ 7367 7368 gen_reload (reloadreg, second_reloadreg, 7369 reload_opnum[j], reload_when_needed[j]); 7370 } 7371 } 7372 } 7373#endif 7374 7375 /* Output the last reload insn. */ 7376 if (! special) 7377 { 7378 rtx set; 7379 7380 /* Don't output the last reload if OLD is not the dest of 7381 INSN and is in the src and is clobbered by INSN. */ 7382 if (! flag_expensive_optimizations 7383 || GET_CODE (old) != REG 7384 || !(set = single_set (insn)) 7385 || rtx_equal_p (old, SET_DEST (set)) 7386 || !reg_mentioned_p (old, SET_SRC (set)) 7387 || !regno_clobbered_p (REGNO (old), insn, reload_mode[j], 0)) 7388 gen_reload (old, reloadreg, reload_opnum[j], 7389 reload_when_needed[j]); 7390 } 7391 7392 /* Look at all insns we emitted, just to be safe. */ 7393 for (p = get_insns (); p; p = NEXT_INSN (p)) 7394 if (GET_RTX_CLASS (GET_CODE (p)) == 'i') 7395 { 7396 rtx pat = PATTERN (p); 7397 7398 /* If this output reload doesn't come from a spill reg, 7399 clear any memory of reloaded copies of the pseudo reg. 7400 If this output reload comes from a spill reg, 7401 reg_has_output_reload will make this do nothing. 
*/ 7402 note_stores (pat, forget_old_reloads_1); 7403 7404 if (reg_mentioned_p (reload_reg_rtx[j], pat)) 7405 { 7406 rtx set = single_set (insn); 7407 if (reload_spill_index[j] < 0 7408 && set 7409 && SET_SRC (set) == reload_reg_rtx[j]) 7410 { 7411 int src = REGNO (SET_SRC (set)); 7412 7413 reload_spill_index[j] = src; 7414 SET_HARD_REG_BIT (reg_is_output_reload, src); 7415 if (find_regno_note (insn, REG_DEAD, src)) 7416 SET_HARD_REG_BIT (reg_reloaded_died, src); 7417 } 7418 if (REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER) 7419 { 7420 int s = reload_secondary_out_reload[j]; 7421 set = single_set (p); 7422 /* If this reload copies only to the secondary reload 7423 register, the secondary reload does the actual 7424 store. */ 7425 if (s >= 0 && set == NULL_RTX) 7426 ; /* We can't tell what function the secondary reload 7427 has and where the actual store to the pseudo is 7428 made; leave new_spill_reg_store alone. */ 7429 else if (s >= 0 7430 && SET_SRC (set) == reload_reg_rtx[j] 7431 && SET_DEST (set) == reload_reg_rtx[s]) 7432 { 7433 /* Usually the next instruction will be the 7434 secondary reload insn; if we can confirm 7435 that it is, setting new_spill_reg_store to 7436 that insn will allow an extra optimization. */ 7437 rtx s_reg = reload_reg_rtx[s]; 7438 rtx next = NEXT_INSN (p); 7439 reload_out[s] = reload_out[j]; 7440 reload_out_reg[s] = reload_out_reg[j]; 7441 set = single_set (next); 7442 if (set && SET_SRC (set) == s_reg 7443 && ! 
new_spill_reg_store[REGNO (s_reg)]) 7444 { 7445 SET_HARD_REG_BIT (reg_is_output_reload, 7446 REGNO (s_reg)); 7447 new_spill_reg_store[REGNO (s_reg)] = next; 7448 } 7449 } 7450 else 7451 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = p; 7452 } 7453 } 7454 } 7455 7456 if (reload_when_needed[j] == RELOAD_OTHER) 7457 { 7458 emit_insns (other_output_reload_insns[reload_opnum[j]]); 7459 other_output_reload_insns[reload_opnum[j]] = get_insns (); 7460 } 7461 else 7462 output_reload_insns[reload_opnum[j]] = get_insns (); 7463 7464 end_sequence (); 7465 } 7466 } 7467 7468 /* Now write all the insns we made for reloads in the order expected by 7469 the allocation functions. Prior to the insn being reloaded, we write 7470 the following reloads: 7471 7472 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses. 7473 7474 RELOAD_OTHER reloads. 7475 7476 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed 7477 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the 7478 RELOAD_FOR_INPUT reload for the operand. 7479 7480 RELOAD_FOR_OPADDR_ADDRS reloads. 7481 7482 RELOAD_FOR_OPERAND_ADDRESS reloads. 7483 7484 After the insn being reloaded, we write the following: 7485 7486 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed 7487 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the 7488 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output 7489 reloads for the operand. The RELOAD_OTHER output reloads are 7490 output in descending order by reload number. 
*/ 7491 7492 emit_insns_before (other_input_address_reload_insns, insn); 7493 emit_insns_before (other_input_reload_insns, insn); 7494 7495 for (j = 0; j < reload_n_operands; j++) 7496 { 7497 emit_insns_before (inpaddr_address_reload_insns[j], insn); 7498 emit_insns_before (input_address_reload_insns[j], insn); 7499 emit_insns_before (input_reload_insns[j], insn); 7500 } 7501 7502 emit_insns_before (other_operand_reload_insns, insn); 7503 emit_insns_before (operand_reload_insns, insn); 7504 7505 for (j = 0; j < reload_n_operands; j++) 7506 { 7507 emit_insns_before (outaddr_address_reload_insns[j], following_insn); 7508 emit_insns_before (output_address_reload_insns[j], following_insn); 7509 emit_insns_before (output_reload_insns[j], following_insn); 7510 emit_insns_before (other_output_reload_insns[j], following_insn); 7511 } 7512 7513 /* Keep basic block info up to date. */ 7514 if (n_basic_blocks) 7515 { 7516 if (BLOCK_HEAD (chain->block) == insn) 7517 BLOCK_HEAD (chain->block) = NEXT_INSN (before_insn); 7518 if (BLOCK_END (chain->block) == insn) 7519 BLOCK_END (chain->block) = PREV_INSN (following_insn); 7520 } 7521 7522 /* For all the spill regs newly reloaded in this instruction, 7523 record what they were reloaded from, so subsequent instructions 7524 can inherit the reloads. 7525 7526 Update spill_reg_store for the reloads of this insn. 7527 Copy the elements that were updated in the loop above. */ 7528 7529 for (j = 0; j < n_reloads; j++) 7530 { 7531 register int r = reload_order[j]; 7532 register int i = reload_spill_index[r]; 7533 7534 /* If this is a non-inherited input reload from a pseudo, we must 7535 clear any memory of a previous store to the same pseudo. Only do 7536 something if there will not be an output reload for the pseudo 7537 being reloaded. */ 7538 if (reload_in_reg[r] != 0 7539 && ! 
(reload_inherited[r] || reload_override_in[r])) 7540 { 7541 rtx reg = reload_in_reg[r]; 7542 7543 if (GET_CODE (reg) == SUBREG) 7544 reg = SUBREG_REG (reg); 7545 7546 if (GET_CODE (reg) == REG 7547 && REGNO (reg) >= FIRST_PSEUDO_REGISTER 7548 && ! reg_has_output_reload[REGNO (reg)]) 7549 { 7550 int nregno = REGNO (reg); 7551 7552 if (reg_last_reload_reg[nregno]) 7553 { 7554 int last_regno = REGNO (reg_last_reload_reg[nregno]); 7555 7556 if (reg_reloaded_contents[last_regno] == nregno) 7557 spill_reg_store[last_regno] = 0; 7558 } 7559 } 7560 } 7561 7562 /* I is nonneg if this reload used a register. 7563 If reload_reg_rtx[r] is 0, this is an optional reload 7564 that we opted to ignore. */ 7565 7566 if (i >= 0 && reload_reg_rtx[r] != 0) 7567 { 7568 int nr 7569 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r])); 7570 int k; 7571 int part_reaches_end = 0; 7572 int all_reaches_end = 1; 7573 7574 /* For a multi register reload, we need to check if all or part 7575 of the value lives to the end. */ 7576 for (k = 0; k < nr; k++) 7577 { 7578 if (reload_reg_reaches_end_p (i + k, reload_opnum[r], 7579 reload_when_needed[r])) 7580 part_reaches_end = 1; 7581 else 7582 all_reaches_end = 0; 7583 } 7584 7585 /* Ignore reloads that don't reach the end of the insn in 7586 entirety. */ 7587 if (all_reaches_end) 7588 { 7589 /* First, clear out memory of what used to be in this spill reg. 7590 If consecutive registers are used, clear them all. */ 7591 7592 for (k = 0; k < nr; k++) 7593 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k); 7594 7595 /* Maybe the spill reg contains a copy of reload_out. */ 7596 if (reload_out[r] != 0 7597 && (GET_CODE (reload_out[r]) == REG 7598#ifdef AUTO_INC_DEC 7599 || ! reload_out_reg[r] 7600#endif 7601 || GET_CODE (reload_out_reg[r]) == REG)) 7602 { 7603 rtx out = (GET_CODE (reload_out[r]) == REG 7604 ? reload_out[r] 7605 : reload_out_reg[r] 7606 ? 
reload_out_reg[r] 7607/* AUTO_INC */ : XEXP (reload_in_reg[r], 0)); 7608 register int nregno = REGNO (out); 7609 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1 7610 : HARD_REGNO_NREGS (nregno, 7611 GET_MODE (reload_reg_rtx[r]))); 7612 7613 spill_reg_store[i] = new_spill_reg_store[i]; 7614 spill_reg_stored_to[i] = out; 7615 reg_last_reload_reg[nregno] = reload_reg_rtx[r]; 7616 7617 /* If NREGNO is a hard register, it may occupy more than 7618 one register. If it does, say what is in the 7619 rest of the registers assuming that both registers 7620 agree on how many words the object takes. If not, 7621 invalidate the subsequent registers. */ 7622 7623 if (nregno < FIRST_PSEUDO_REGISTER) 7624 for (k = 1; k < nnr; k++) 7625 reg_last_reload_reg[nregno + k] 7626 = (nr == nnr 7627 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k], 7628 REGNO (reload_reg_rtx[r]) + k) 7629 : 0); 7630 7631 /* Now do the inverse operation. */ 7632 for (k = 0; k < nr; k++) 7633 { 7634 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k); 7635 reg_reloaded_contents[i + k] 7636 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr 7637 ? nregno 7638 : nregno + k); 7639 reg_reloaded_insn[i + k] = insn; 7640 SET_HARD_REG_BIT (reg_reloaded_valid, i + k); 7641 } 7642 } 7643 7644 /* Maybe the spill reg contains a copy of reload_in. Only do 7645 something if there will not be an output reload for 7646 the register being reloaded. */ 7647 else if (reload_out_reg[r] == 0 7648 && reload_in[r] != 0 7649 && ((GET_CODE (reload_in[r]) == REG 7650 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER 7651 && ! reg_has_output_reload[REGNO (reload_in[r])]) 7652 || (GET_CODE (reload_in_reg[r]) == REG 7653 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])) 7654 && ! 
reg_set_p (reload_reg_rtx[r], PATTERN (insn))) 7655 { 7656 register int nregno; 7657 int nnr; 7658 7659 if (GET_CODE (reload_in[r]) == REG 7660 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER) 7661 nregno = REGNO (reload_in[r]); 7662 else if (GET_CODE (reload_in_reg[r]) == REG) 7663 nregno = REGNO (reload_in_reg[r]); 7664 else 7665 nregno = REGNO (XEXP (reload_in_reg[r], 0)); 7666 7667 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1 7668 : HARD_REGNO_NREGS (nregno, 7669 GET_MODE (reload_reg_rtx[r]))); 7670 7671 reg_last_reload_reg[nregno] = reload_reg_rtx[r]; 7672 7673 if (nregno < FIRST_PSEUDO_REGISTER) 7674 for (k = 1; k < nnr; k++) 7675 reg_last_reload_reg[nregno + k] 7676 = (nr == nnr 7677 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k], 7678 REGNO (reload_reg_rtx[r]) + k) 7679 : 0); 7680 7681 /* Unless we inherited this reload, show we haven't 7682 recently done a store. 7683 Previous stores of inherited auto_inc expressions 7684 also have to be discarded. */ 7685 if (! reload_inherited[r] 7686 || (reload_out[r] && ! reload_out_reg[r])) 7687 spill_reg_store[i] = 0; 7688 7689 for (k = 0; k < nr; k++) 7690 { 7691 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k); 7692 reg_reloaded_contents[i + k] 7693 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr 7694 ? nregno 7695 : nregno + k); 7696 reg_reloaded_insn[i + k] = insn; 7697 SET_HARD_REG_BIT (reg_reloaded_valid, i + k); 7698 } 7699 } 7700 } 7701 7702 /* However, if part of the reload reaches the end, then we must 7703 invalidate the old info for the part that survives to the end. */ 7704 else if (part_reaches_end) 7705 { 7706 for (k = 0; k < nr; k++) 7707 if (reload_reg_reaches_end_p (i + k, 7708 reload_opnum[r], 7709 reload_when_needed[r])) 7710 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k); 7711 } 7712 } 7713 7714 /* The following if-statement was #if 0'd in 1.34 (or before...). 7715 It's reenabled in 1.35 because supposedly nothing else 7716 deals with this problem. 
*/ 7717 7718 /* If a register gets output-reloaded from a non-spill register, 7719 that invalidates any previous reloaded copy of it. 7720 But forget_old_reloads_1 won't get to see it, because 7721 it thinks only about the original insn. So invalidate it here. */ 7722 if (i < 0 && reload_out[r] != 0 7723 && (GET_CODE (reload_out[r]) == REG 7724 || (GET_CODE (reload_out[r]) == MEM 7725 && GET_CODE (reload_out_reg[r]) == REG))) 7726 { 7727 rtx out = (GET_CODE (reload_out[r]) == REG 7728 ? reload_out[r] : reload_out_reg[r]); 7729 register int nregno = REGNO (out); 7730 if (nregno >= FIRST_PSEUDO_REGISTER) 7731 { 7732 rtx src_reg, store_insn; 7733 7734 reg_last_reload_reg[nregno] = 0; 7735 7736 /* If we can find a hard register that is stored, record 7737 the storing insn so that we may delete this insn with 7738 delete_output_reload. */ 7739 src_reg = reload_reg_rtx[r]; 7740 7741 /* If this is an optional reload, try to find the source reg 7742 from an input reload. */ 7743 if (! src_reg) 7744 { 7745 rtx set = single_set (insn); 7746 if (set && SET_DEST (set) == reload_out[r]) 7747 { 7748 int k; 7749 7750 src_reg = SET_SRC (set); 7751 store_insn = insn; 7752 for (k = 0; k < n_reloads; k++) 7753 { 7754 if (reload_in[k] == src_reg) 7755 { 7756 src_reg = reload_reg_rtx[k]; 7757 break; 7758 } 7759 } 7760 } 7761 } 7762 else 7763 store_insn = new_spill_reg_store[REGNO (src_reg)]; 7764 if (src_reg && GET_CODE (src_reg) == REG 7765 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER) 7766 { 7767 int src_regno = REGNO (src_reg); 7768 int nr = HARD_REGNO_NREGS (src_regno, reload_mode[r]); 7769 /* The place where to find a death note varies with 7770 PRESERVE_DEATH_INFO_REGNO_P . The condition is not 7771 necessarily checked exactly in the code that moves 7772 notes, so just check both locations. */ 7773 rtx note = find_regno_note (insn, REG_DEAD, src_regno); 7774 if (! 
note)
	    note = find_regno_note (store_insn, REG_DEAD, src_regno);
	  /* Record, for every hard reg occupied by SRC_REG, that it now
	     holds a copy of the pseudo NREGNO, and which insn stored it,
	     so that delete_output_reload can later remove the store.  */
	  while (nr-- > 0)
	    {
	      spill_reg_store[src_regno + nr] = store_insn;
	      spill_reg_stored_to[src_regno + nr] = out;
	      reg_reloaded_contents[src_regno + nr] = nregno;
	      reg_reloaded_insn[src_regno + nr] = store_insn;
	      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
	      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
	      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
	      /* Mirror the death note (if any) into reg_reloaded_died.  */
	      if (note)
		SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
	      else
		CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
	    }
	  reg_last_reload_reg[nregno] = src_reg;
	}
    }
  else
    {
      /* No usable source hard reg was found: forget any previously
	 recorded reload regs for all hard regs of the pseudo.  */
      int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));

      while (num_regs-- > 0)
	reg_last_reload_reg[nregno + num_regs] = 0;
    }
	}
    }
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
}

/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* LAST marks the insn stream position before we emit anything, so the
     first emitted insn can be returned (and failed attempts deleted).  */
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.
 */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any pending reload replacements into the operands.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      /* First attempt: a single three-operand add.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (1))
	    return insn;
	}

      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;

      /* Make OP0 the operand that goes through the move (the one add
	 cannot accept directly); swap if OP1 is the awkward one.  */
      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && ! (*insn_operand_predicate[code][2]) (op1, insn_operand_mode[code][2])))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (1))
	    {
	      /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
	      return insn;
	    }
	}

      delete_insns_since (last);

      /* Last resort: reload OP1 into OUT, then add OP0 into it.  */
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      /* Move IN -> memory -> OUT with two recursive reloads.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.
 */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}

/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.  */

static void
delete_output_reload (insn, j, last_reload_reg)
     rtx insn;
     int j;
     int last_reload_reg;
{
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  register rtx i1;
  rtx substed;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  /* SUBSTED is the memory equivalent (if any) that may have been
     substituted for REG elsewhere in the insn.  */
  substed = reg_equiv_memory_loc[REGNO (reg)];

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  Count, over all reloads of this insn,
     how many uses of REG are covered by inheritance.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = reload_in[k];
      if (! reg2)
	continue;
      if (GET_CODE (reg2) == MEM || reload_override_in[k])
	reg2 = reload_in_reg[k];
#ifdef AUTO_INC_DEC
      if (reload_out[k] && !
reload_out_reg[k])
	reg2 = XEXP (reload_in_reg[k], 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    {
	      n_inherited++;
	      /* An output reload of REG in the same reload also counts
		 as an accounted-for occurrence.  */
	      reg2 = reload_out_reg[k];
	      if (! reg2)
		continue;
	      while (GET_CODE (reg2) == SUBREG)
		reg2 = XEXP (reg2, 0);
	      if (rtx_equal_p (reg2, reg))
		n_inherited++;
	    }
	  else
	    return;
	}
    }
  n_occurrences = count_occurrences (PATTERN (insn), reg);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, 0,
							NULL_RTX));
  /* More uses than inheritance accounts for: deleting would be unsafe.  */
  if (n_occurrences > n_inherited)
    return;

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some inaccuracies
     in the debugging information are acceptable.
     So we could just delete output_reload_insn.
     But in some cases we can improve the debugging information without
     sacrificing optimization - maybe even improving the code:
     See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  if (reload_out[j] != reload_in[j]
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= 0
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      /* Turn the insn into a deleted note rather than unlinking it,
		 since it may be a basic block boundary.  */
	      PUT_CODE (output_reload_insn, NOTE);
	      NOTE_SOURCE_FILE (output_reload_insn) = 0;
	      NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      /* This might be a basic block head,
		 thus don't use delete_insn.
 */
	      PUT_CODE (i2, NOTE);
	      NOTE_SOURCE_FILE (i2) = 0;
	      NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	    }
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
  /* Finally delete the output reload insn itself (as a NOTE, since it
     may be a basic block boundary), after cleaning up any address
     reloads it used.  */
  delete_address_reloads (output_reload_insn, insn);
  PUT_CODE (output_reload_insn, NOTE);
  NOTE_SOURCE_FILE (output_reload_insn) = 0;
  NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;

}

/* We are going to delete DEAD_INSN.  Recursively delete loads of
   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
static void
delete_address_reloads (dead_insn, current_insn)
     rtx dead_insn, current_insn;
{
  rtx set = single_set (dead_insn);
  rtx set2, dst, prev, next;
  if (set)
    {
      /* NOTE(review): this inner DST shadows the outer one declared
	 above; the outer DST is only used after this block.  */
      rtx dst = SET_DEST (set);
      if (GET_CODE (dst) == MEM)
	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
    }
  /* If we deleted the store from a reloaded post_{in,de}c expression,
     we can delete the matching adds.  */
  prev = PREV_INSN (dead_insn);
  next = NEXT_INSN (dead_insn);
  if (! prev || ! next)
    return;
  set = single_set (next);
  set2 = single_set (prev);
  /* Both neighbours must be reg = reg + const_int to be a matching
     increment/decrement pair.  */
  if (! set || ! set2
      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
      || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
      || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
    return;
  dst = SET_DEST (set);
  if (! rtx_equal_p (dst, SET_DEST (set2))
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
      || !
rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
      || (INTVAL (XEXP (SET_SRC (set), 1))
	  != - INTVAL (XEXP (SET_SRC (set2), 1))))
    return;
  /* The two adds cancel each other; both can go.  */
  delete_insn (prev);
  delete_insn (next);
}

/* Subfunction of delete_address_reloads: process registers found in X.  */
static void
delete_address_reloads_1 (dead_insn, x, current_insn)
     rtx dead_insn, x, current_insn;
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For a non-register, recurse into every sub-expression.  */
  if (code != REG)
    {
      char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are of interest here.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (GET_RTX_CLASS (code) != 'i')
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (GET_CODE (dst) != REG
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (GET_CODE (i2) == CODE_LABEL)
	    break;
	  if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((reload_reg_rtx[j] == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (reload_in[j] && reload_reg_rtx[j] == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (GET_CODE (i2) == JUMP_INSN)
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((reload_reg_rtx[j] == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead too: recursively clean up whatever fed it.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  /* Can't use delete_insn here because PREV might be a basic block head.  */
  PUT_CODE (prev, NOTE);
  NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (prev) = 0;
}

/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   Return the instruction that stores into RELOADREG.  */

static rtx
inc_for_reload (reloadreg, in, value, inc_amount)
     rtx reloadreg;
     rtx in, value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  rtx store;
  /* The actual location to load from when IN is the auto-inc rtx itself.  */
  rtx real_in = in == value ? XEXP (in, 0) : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Decrements are represented as a negative increment.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload: emit the add, then keep it only if it is
	 recognized and satisfies its constraints.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));

	      return add_insn;
	    }
	}
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return store;
}

/* Return 1 if we are certain that the constraint-string STRING allows
   the hard register REG.  Return 0 if we can't be sure of this.
*/ 8447 8448static int 8449constraint_accepts_reg_p (string, reg) 8450 const char *string; 8451 rtx reg; 8452{ 8453 int value = 0; 8454 int regno = true_regnum (reg); 8455 int c; 8456 8457 /* Initialize for first alternative. */ 8458 value = 0; 8459 /* Check that each alternative contains `g' or `r'. */ 8460 while (1) 8461 switch (c = *string++) 8462 { 8463 case 0: 8464 /* If an alternative lacks `g' or `r', we lose. */ 8465 return value; 8466 case ',': 8467 /* If an alternative lacks `g' or `r', we lose. */ 8468 if (value == 0) 8469 return 0; 8470 /* Initialize for next alternative. */ 8471 value = 0; 8472 break; 8473 case 'g': 8474 case 'r': 8475 /* Any general reg wins for this alternative. */ 8476 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno)) 8477 value = 1; 8478 break; 8479 default: 8480 /* Any reg in specified class wins for this alternative. */ 8481 { 8482 enum reg_class class = REG_CLASS_FROM_LETTER (c); 8483 8484 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)) 8485 value = 1; 8486 } 8487 } 8488} 8489 8490/* Return the number of places FIND appears within X, but don't count 8491 an occurrence if some SET_DEST is FIND. 
*/ 8492 8493int 8494count_occurrences (x, find) 8495 register rtx x, find; 8496{ 8497 register int i, j; 8498 register enum rtx_code code; 8499 register char *format_ptr; 8500 int count; 8501 8502 if (x == find) 8503 return 1; 8504 if (x == 0) 8505 return 0; 8506 8507 code = GET_CODE (x); 8508 8509 switch (code) 8510 { 8511 case REG: 8512 case QUEUED: 8513 case CONST_INT: 8514 case CONST_DOUBLE: 8515 case SYMBOL_REF: 8516 case CODE_LABEL: 8517 case PC: 8518 case CC0: 8519 return 0; 8520 8521 case MEM: 8522 if (GET_CODE (find) == MEM && rtx_equal_p (x, find)) 8523 return 1; 8524 break; 8525 case SET: 8526 if (SET_DEST (x) == find) 8527 return count_occurrences (SET_SRC (x), find); 8528 break; 8529 8530 default: 8531 break; 8532 } 8533 8534 format_ptr = GET_RTX_FORMAT (code); 8535 count = 0; 8536 8537 for (i = 0; i < GET_RTX_LENGTH (code); i++) 8538 { 8539 switch (*format_ptr++) 8540 { 8541 case 'e': 8542 count += count_occurrences (XEXP (x, i), find); 8543 break; 8544 8545 case 'E': 8546 if (XVEC (x, i) != NULL) 8547 { 8548 for (j = 0; j < XVECLEN (x, i); j++) 8549 count += count_occurrences (XVECEXP (x, i, j), find); 8550 } 8551 break; 8552 } 8553 } 8554 return count; 8555} 8556 8557/* This array holds values which are equivalent to a hard register 8558 during reload_cse_regs. Each array element is an EXPR_LIST of 8559 values. Each time a hard register is set, we set the corresponding 8560 array element to the value. Each time a hard register is copied 8561 into memory, we add the memory location to the corresponding array 8562 element. We don't store values or memory addresses with side 8563 effects in this array. 8564 8565 If the value is a CONST_INT, then the mode of the containing 8566 EXPR_LIST is the mode in which that CONST_INT was referenced. 8567 8568 We sometimes clobber a specific entry in a list. In that case, we 8569 just set XEXP (list-entry, 0) to 0. 
*/ 8570 8571static rtx *reg_values; 8572 8573/* This is a preallocated REG rtx which we use as a temporary in 8574 reload_cse_invalidate_regno, so that we don't need to allocate a 8575 new one each time through a loop in that function. */ 8576 8577static rtx invalidate_regno_rtx; 8578 8579/* Invalidate any entries in reg_values which depend on REGNO, 8580 including those for REGNO itself. This is called if REGNO is 8581 changing. If CLOBBER is true, then always forget anything we 8582 currently know about REGNO. MODE is the mode of the assignment to 8583 REGNO, which is used to determine how many hard registers are being 8584 changed. If MODE is VOIDmode, then only REGNO is being changed; 8585 this is used when invalidating call clobbered registers across a 8586 call. */ 8587 8588static void 8589reload_cse_invalidate_regno (regno, mode, clobber) 8590 int regno; 8591 enum machine_mode mode; 8592 int clobber; 8593{ 8594 int endregno; 8595 register int i; 8596 8597 /* Our callers don't always go through true_regnum; we may see a 8598 pseudo-register here from a CLOBBER or the like. We probably 8599 won't ever see a pseudo-register that has a real register number, 8600 for we check anyhow for safety. */ 8601 if (regno >= FIRST_PSEUDO_REGISTER) 8602 regno = reg_renumber[regno]; 8603 if (regno < 0) 8604 return; 8605 8606 if (mode == VOIDmode) 8607 endregno = regno + 1; 8608 else 8609 endregno = regno + HARD_REGNO_NREGS (regno, mode); 8610 8611 if (clobber) 8612 for (i = regno; i < endregno; i++) 8613 reg_values[i] = 0; 8614 8615 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 8616 { 8617 rtx x; 8618 8619 for (x = reg_values[i]; x; x = XEXP (x, 1)) 8620 { 8621 if (XEXP (x, 0) != 0 8622 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR)) 8623 { 8624 /* If this is the only entry on the list, clear 8625 reg_values[i]. Otherwise, just clear this entry on 8626 the list. 
*/ 8627 if (XEXP (x, 1) == 0 && x == reg_values[i]) 8628 { 8629 reg_values[i] = 0; 8630 break; 8631 } 8632 XEXP (x, 0) = 0; 8633 } 8634 } 8635 } 8636 8637 /* We must look at earlier registers, in case REGNO is part of a 8638 multi word value but is not the first register. If an earlier 8639 register has a value in a mode which overlaps REGNO, then we must 8640 invalidate that earlier register. Note that we do not need to 8641 check REGNO or later registers (we must not check REGNO itself, 8642 because we would incorrectly conclude that there was a conflict). */ 8643 8644 for (i = 0; i < regno; i++) 8645 { 8646 rtx x; 8647 8648 for (x = reg_values[i]; x; x = XEXP (x, 1)) 8649 { 8650 if (XEXP (x, 0) != 0) 8651 { 8652 PUT_MODE (invalidate_regno_rtx, GET_MODE (x)); 8653 REGNO (invalidate_regno_rtx) = i; 8654 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx, 8655 NULL_PTR)) 8656 { 8657 reload_cse_invalidate_regno (i, VOIDmode, 1); 8658 break; 8659 } 8660 } 8661 } 8662 } 8663} 8664 8665/* The memory at address MEM_BASE is being changed. 8666 Return whether this change will invalidate VAL. */ 8667 8668static int 8669reload_cse_mem_conflict_p (mem_base, val) 8670 rtx mem_base; 8671 rtx val; 8672{ 8673 enum rtx_code code; 8674 char *fmt; 8675 int i; 8676 8677 code = GET_CODE (val); 8678 switch (code) 8679 { 8680 /* Get rid of a few simple cases quickly. */ 8681 case REG: 8682 case PC: 8683 case CC0: 8684 case SCRATCH: 8685 case CONST: 8686 case CONST_INT: 8687 case CONST_DOUBLE: 8688 case SYMBOL_REF: 8689 case LABEL_REF: 8690 return 0; 8691 8692 case MEM: 8693 if (GET_MODE (mem_base) == BLKmode 8694 || GET_MODE (val) == BLKmode) 8695 return 1; 8696 if (anti_dependence (val, mem_base)) 8697 return 1; 8698 /* The address may contain nested MEMs. 
*/ 8699 break; 8700 8701 default: 8702 break; 8703 } 8704 8705 fmt = GET_RTX_FORMAT (code); 8706 8707 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 8708 { 8709 if (fmt[i] == 'e') 8710 { 8711 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i))) 8712 return 1; 8713 } 8714 else if (fmt[i] == 'E') 8715 { 8716 int j; 8717 8718 for (j = 0; j < XVECLEN (val, i); j++) 8719 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j))) 8720 return 1; 8721 } 8722 } 8723 8724 return 0; 8725} 8726 8727/* Invalidate any entries in reg_values which are changed because of a 8728 store to MEM_RTX. If this is called because of a non-const call 8729 instruction, MEM_RTX is (mem:BLK const0_rtx). */ 8730 8731static void 8732reload_cse_invalidate_mem (mem_rtx) 8733 rtx mem_rtx; 8734{ 8735 register int i; 8736 8737 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 8738 { 8739 rtx x; 8740 8741 for (x = reg_values[i]; x; x = XEXP (x, 1)) 8742 { 8743 if (XEXP (x, 0) != 0 8744 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0))) 8745 { 8746 /* If this is the only entry on the list, clear 8747 reg_values[i]. Otherwise, just clear this entry on 8748 the list. */ 8749 if (XEXP (x, 1) == 0 && x == reg_values[i]) 8750 { 8751 reg_values[i] = 0; 8752 break; 8753 } 8754 XEXP (x, 0) = 0; 8755 } 8756 } 8757 } 8758} 8759 8760/* Invalidate DEST, which is being assigned to or clobbered. The 8761 second parameter exists so that this function can be passed to 8762 note_stores; it is ignored. 
*/ 8763 8764static void 8765reload_cse_invalidate_rtx (dest, ignore) 8766 rtx dest; 8767 rtx ignore ATTRIBUTE_UNUSED; 8768{ 8769 while (GET_CODE (dest) == STRICT_LOW_PART 8770 || GET_CODE (dest) == SIGN_EXTRACT 8771 || GET_CODE (dest) == ZERO_EXTRACT 8772 || GET_CODE (dest) == SUBREG) 8773 dest = XEXP (dest, 0); 8774 8775 if (GET_CODE (dest) == REG) 8776 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1); 8777 else if (GET_CODE (dest) == MEM) 8778 reload_cse_invalidate_mem (dest); 8779} 8780 8781/* Do a very simple CSE pass over the hard registers. 8782 8783 This function detects no-op moves where we happened to assign two 8784 different pseudo-registers to the same hard register, and then 8785 copied one to the other. Reload will generate a useless 8786 instruction copying a register to itself. 8787 8788 This function also detects cases where we load a value from memory 8789 into two different registers, and (if memory is more expensive than 8790 registers) changes it to simply copy the first register into the 8791 second register. 8792 8793 Another optimization is performed that scans the operands of each 8794 instruction to see whether the value is already available in a 8795 hard register. It then replaces the operand with the hard register 8796 if possible, much like an optional reload would. */ 8797 8798static void 8799reload_cse_regs_1 (first) 8800 rtx first; 8801{ 8802 char *firstobj; 8803 rtx callmem; 8804 register int i; 8805 rtx insn; 8806 8807 init_alias_analysis (); 8808 8809 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx)); 8810 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx)); 8811 8812 /* Create our EXPR_LIST structures on reload_obstack, so that we can 8813 free them when we are done. 
*/ 8814 push_obstacks (&reload_obstack, &reload_obstack); 8815 firstobj = (char *) obstack_alloc (&reload_obstack, 0); 8816 8817 /* We pass this to reload_cse_invalidate_mem to invalidate all of 8818 memory for a non-const call instruction. */ 8819 callmem = gen_rtx_MEM (BLKmode, const0_rtx); 8820 8821 /* This is used in reload_cse_invalidate_regno to avoid consing a 8822 new REG in a loop in that function. */ 8823 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0); 8824 8825 for (insn = first; insn; insn = NEXT_INSN (insn)) 8826 { 8827 rtx body; 8828 8829 if (GET_CODE (insn) == CODE_LABEL) 8830 { 8831 /* Forget all the register values at a code label. We don't 8832 try to do anything clever around jumps. */ 8833 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 8834 reg_values[i] = 0; 8835 8836 continue; 8837 } 8838 8839#ifdef NON_SAVING_SETJMP 8840 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE 8841 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP) 8842 { 8843 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 8844 reg_values[i] = 0; 8845 8846 continue; 8847 } 8848#endif 8849 8850 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i') 8851 continue; 8852 8853 /* If this is a call instruction, forget anything stored in a 8854 call clobbered register, or, if this is not a const call, in 8855 memory. */ 8856 if (GET_CODE (insn) == CALL_INSN) 8857 { 8858 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 8859 if (call_used_regs[i]) 8860 reload_cse_invalidate_regno (i, VOIDmode, 1); 8861 8862 if (! CONST_CALL_P (insn)) 8863 reload_cse_invalidate_mem (callmem); 8864 } 8865 8866 8867 /* Forget all the register values at a volatile asm. 
*/ 8868 if (GET_CODE (insn) == INSN 8869 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS 8870 && MEM_VOLATILE_P (PATTERN (insn))) 8871 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 8872 reg_values[i] = 0; 8873 8874 body = PATTERN (insn); 8875 if (GET_CODE (body) == SET) 8876 { 8877 int count = 0; 8878 if (reload_cse_noop_set_p (body, insn)) 8879 { 8880 /* If this sets the return value of the function, we must keep 8881 a USE around, in case this is in a different basic block 8882 than the final USE. Otherwise, we could loose important 8883 register lifeness information on SMALL_REGISTER_CLASSES 8884 machines, where return registers might be used as spills: 8885 subsequent passes assume that spill registers are dead at 8886 the end of a basic block. */ 8887 if (REG_FUNCTION_VALUE_P (SET_DEST (body))) 8888 { 8889 pop_obstacks (); 8890 PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body)); 8891 INSN_CODE (insn) = -1; 8892 REG_NOTES (insn) = NULL_RTX; 8893 push_obstacks (&reload_obstack, &reload_obstack); 8894 } 8895 else 8896 { 8897 PUT_CODE (insn, NOTE); 8898 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; 8899 NOTE_SOURCE_FILE (insn) = 0; 8900 } 8901 8902 /* We're done with this insn. */ 8903 continue; 8904 } 8905 8906 /* It's not a no-op, but we can try to simplify it. */ 8907 count += reload_cse_simplify_set (body, insn); 8908 8909 if (count > 0) 8910 apply_change_group (); 8911 else 8912 reload_cse_simplify_operands (insn); 8913 8914 reload_cse_record_set (body, body); 8915 } 8916 else if (GET_CODE (body) == PARALLEL) 8917 { 8918 int count = 0; 8919 rtx value = NULL_RTX; 8920 8921 /* If every action in a PARALLEL is a noop, we can delete 8922 the entire PARALLEL. */ 8923 for (i = XVECLEN (body, 0) - 1; i >= 0; --i) 8924 { 8925 rtx part = XVECEXP (body, 0, i); 8926 if (GET_CODE (part) == SET) 8927 { 8928 if (! 
reload_cse_noop_set_p (part, insn)) 8929 break; 8930 if (REG_FUNCTION_VALUE_P (SET_DEST (part))) 8931 { 8932 if (value) 8933 break; 8934 value = SET_DEST (part); 8935 } 8936 } 8937 else if (GET_CODE (part) != CLOBBER) 8938 break; 8939 } 8940 if (i < 0) 8941 { 8942 if (value) 8943 { 8944 pop_obstacks (); 8945 PATTERN (insn) = gen_rtx_USE (VOIDmode, value); 8946 INSN_CODE (insn) = -1; 8947 REG_NOTES (insn) = NULL_RTX; 8948 push_obstacks (&reload_obstack, &reload_obstack); 8949 } 8950 else 8951 { 8952 PUT_CODE (insn, NOTE); 8953 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; 8954 NOTE_SOURCE_FILE (insn) = 0; 8955 } 8956 8957 /* We're done with this insn. */ 8958 continue; 8959 } 8960 8961 /* It's not a no-op, but we can try to simplify it. */ 8962 for (i = XVECLEN (body, 0) - 1; i >= 0; --i) 8963 if (GET_CODE (XVECEXP (body, 0, i)) == SET) 8964 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn); 8965 8966 if (count > 0) 8967 apply_change_group (); 8968 else 8969 reload_cse_simplify_operands (insn); 8970 8971 /* Look through the PARALLEL and record the values being 8972 set, if possible. Also handle any CLOBBERs. */ 8973 for (i = XVECLEN (body, 0) - 1; i >= 0; --i) 8974 { 8975 rtx x = XVECEXP (body, 0, i); 8976 8977 if (GET_CODE (x) == SET) 8978 reload_cse_record_set (x, body); 8979 else 8980 note_stores (x, reload_cse_invalidate_rtx); 8981 } 8982 } 8983 else 8984 note_stores (body, reload_cse_invalidate_rtx); 8985 8986#ifdef AUTO_INC_DEC 8987 /* Clobber any registers which appear in REG_INC notes. We 8988 could keep track of the changes to their values, but it is 8989 unlikely to help. */ 8990 { 8991 rtx x; 8992 8993 for (x = REG_NOTES (insn); x; x = XEXP (x, 1)) 8994 if (REG_NOTE_KIND (x) == REG_INC) 8995 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX); 8996 } 8997#endif 8998 8999 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only 9000 after we have processed the insn. 
*/ 9001 if (GET_CODE (insn) == CALL_INSN) 9002 { 9003 rtx x; 9004 9005 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1)) 9006 if (GET_CODE (XEXP (x, 0)) == CLOBBER) 9007 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX); 9008 } 9009 } 9010 9011 /* Free all the temporary structures we created, and go back to the 9012 regular obstacks. */ 9013 obstack_free (&reload_obstack, firstobj); 9014 pop_obstacks (); 9015} 9016 9017/* Call cse / combine like post-reload optimization phases. 9018 FIRST is the first instruction. */ 9019void 9020reload_cse_regs (first) 9021 rtx first; 9022{ 9023 reload_cse_regs_1 (first); 9024 reload_combine (); 9025 reload_cse_move2add (first); 9026 if (flag_expensive_optimizations) 9027 reload_cse_regs_1 (first); 9028} 9029 9030/* Return whether the values known for REGNO are equal to VAL. MODE 9031 is the mode of the object that VAL is being copied to; this matters 9032 if VAL is a CONST_INT. */ 9033 9034static int 9035reload_cse_regno_equal_p (regno, val, mode) 9036 int regno; 9037 rtx val; 9038 enum machine_mode mode; 9039{ 9040 rtx x; 9041 9042 if (val == 0) 9043 return 0; 9044 9045 for (x = reg_values[regno]; x; x = XEXP (x, 1)) 9046 if (XEXP (x, 0) != 0 9047 && rtx_equal_p (XEXP (x, 0), val) 9048 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM 9049 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT) 9050 && (GET_CODE (val) != CONST_INT 9051 || mode == GET_MODE (x) 9052 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)) 9053 /* On a big endian machine if the value spans more than 9054 one register then this register holds the high part of 9055 it and we can't use it. 9056 9057 ??? We should also compare with the high part of the 9058 value. */ 9059 && !(WORDS_BIG_ENDIAN 9060 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1) 9061 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), 9062 GET_MODE_BITSIZE (GET_MODE (x)))))) 9063 return 1; 9064 9065 return 0; 9066} 9067 9068/* See whether a single set is a noop. 
SET is the set instruction we 9069 are should check, and INSN is the instruction from which it came. */ 9070 9071static int 9072reload_cse_noop_set_p (set, insn) 9073 rtx set; 9074 rtx insn; 9075{ 9076 rtx src, dest; 9077 enum machine_mode dest_mode; 9078 int dreg, sreg; 9079 int ret; 9080 9081 src = SET_SRC (set); 9082 dest = SET_DEST (set); 9083 dest_mode = GET_MODE (dest); 9084 9085 if (side_effects_p (src)) 9086 return 0; 9087 9088 dreg = true_regnum (dest); 9089 sreg = true_regnum (src); 9090 9091 /* Check for setting a register to itself. In this case, we don't 9092 have to worry about REG_DEAD notes. */ 9093 if (dreg >= 0 && dreg == sreg) 9094 return 1; 9095 9096 ret = 0; 9097 if (dreg >= 0) 9098 { 9099 /* Check for setting a register to itself. */ 9100 if (dreg == sreg) 9101 ret = 1; 9102 9103 /* Check for setting a register to a value which we already know 9104 is in the register. */ 9105 else if (reload_cse_regno_equal_p (dreg, src, dest_mode)) 9106 ret = 1; 9107 9108 /* Check for setting a register DREG to another register SREG 9109 where SREG is equal to a value which is already in DREG. */ 9110 else if (sreg >= 0) 9111 { 9112 rtx x; 9113 9114 for (x = reg_values[sreg]; x; x = XEXP (x, 1)) 9115 { 9116 rtx tmp; 9117 9118 if (XEXP (x, 0) == 0) 9119 continue; 9120 9121 if (dest_mode == GET_MODE (x)) 9122 tmp = XEXP (x, 0); 9123 else if (GET_MODE_BITSIZE (dest_mode) 9124 < GET_MODE_BITSIZE (GET_MODE (x))) 9125 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0)); 9126 else 9127 continue; 9128 9129 if (tmp 9130 && reload_cse_regno_equal_p (dreg, tmp, dest_mode)) 9131 { 9132 ret = 1; 9133 break; 9134 } 9135 } 9136 } 9137 } 9138 else if (GET_CODE (dest) == MEM) 9139 { 9140 /* Check for storing a register to memory when we know that the 9141 register is equivalent to the memory location. */ 9142 if (sreg >= 0 9143 && reload_cse_regno_equal_p (sreg, dest, dest_mode) 9144 && ! 
side_effects_p (dest)) 9145 ret = 1; 9146 } 9147 9148 return ret; 9149} 9150 9151/* Try to simplify a single SET instruction. SET is the set pattern. 9152 INSN is the instruction it came from. 9153 This function only handles one case: if we set a register to a value 9154 which is not a register, we try to find that value in some other register 9155 and change the set into a register copy. */ 9156 9157static int 9158reload_cse_simplify_set (set, insn) 9159 rtx set; 9160 rtx insn; 9161{ 9162 int dreg; 9163 rtx src; 9164 enum machine_mode dest_mode; 9165 enum reg_class dclass; 9166 register int i; 9167 9168 dreg = true_regnum (SET_DEST (set)); 9169 if (dreg < 0) 9170 return 0; 9171 9172 src = SET_SRC (set); 9173 if (side_effects_p (src) || true_regnum (src) >= 0) 9174 return 0; 9175 9176 dclass = REGNO_REG_CLASS (dreg); 9177 9178 /* If memory loads are cheaper than register copies, don't change them. */ 9179 if (GET_CODE (src) == MEM 9180 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2) 9181 return 0; 9182 9183 /* If the constant is cheaper than a register, don't change it. */ 9184 if (CONSTANT_P (src) 9185 && rtx_cost (src, SET) < 2) 9186 return 0; 9187 9188 dest_mode = GET_MODE (SET_DEST (set)); 9189 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 9190 { 9191 if (i != dreg 9192 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2 9193 && reload_cse_regno_equal_p (i, src, dest_mode)) 9194 { 9195 int validated; 9196 9197 /* Pop back to the real obstacks while changing the insn. */ 9198 pop_obstacks (); 9199 9200 validated = validate_change (insn, &SET_SRC (set), 9201 gen_rtx_REG (dest_mode, i), 1); 9202 9203 /* Go back to the obstack we are using for temporary 9204 storage. */ 9205 push_obstacks (&reload_obstack, &reload_obstack); 9206 9207 if (validated) 9208 return 1; 9209 } 9210 } 9211 return 0; 9212} 9213 9214/* Try to replace operands in INSN with equivalent values that are already 9215 in registers. This can be viewed as optional reloading. 
9216 9217 For each non-register operand in the insn, see if any hard regs are 9218 known to be equivalent to that operand. Record the alternatives which 9219 can accept these hard registers. Among all alternatives, select the 9220 ones which are better or equal to the one currently matching, where 9221 "better" is in terms of '?' and '!' constraints. Among the remaining 9222 alternatives, select the one which replaces most operands with 9223 hard registers. */ 9224 9225static int 9226reload_cse_simplify_operands (insn) 9227 rtx insn; 9228{ 9229#ifdef REGISTER_CONSTRAINTS 9230 int i,j; 9231 9232 const char *constraints[MAX_RECOG_OPERANDS]; 9233 9234 /* Vector recording how bad an alternative is. */ 9235 int *alternative_reject; 9236 /* Vector recording how many registers can be introduced by choosing 9237 this alternative. */ 9238 int *alternative_nregs; 9239 /* Array of vectors recording, for each operand and each alternative, 9240 which hard register to substitute, or -1 if the operand should be 9241 left as it is. */ 9242 int *op_alt_regno[MAX_RECOG_OPERANDS]; 9243 /* Array of alternatives, sorted in order of decreasing desirability. */ 9244 int *alternative_order; 9245 rtx reg = gen_rtx_REG (VOIDmode, -1); 9246 9247 extract_insn (insn); 9248 9249 if (recog_n_alternatives == 0 || recog_n_operands == 0) 9250 return 0; 9251 9252 /* Figure out which alternative currently matches. */ 9253 if (! 
constrain_operands (1)) 9254 fatal_insn_not_found (insn); 9255 9256 alternative_reject = (int *) alloca (recog_n_alternatives * sizeof (int)); 9257 alternative_nregs = (int *) alloca (recog_n_alternatives * sizeof (int)); 9258 alternative_order = (int *) alloca (recog_n_alternatives * sizeof (int)); 9259 bzero ((char *)alternative_reject, recog_n_alternatives * sizeof (int)); 9260 bzero ((char *)alternative_nregs, recog_n_alternatives * sizeof (int)); 9261 9262 for (i = 0; i < recog_n_operands; i++) 9263 { 9264 enum machine_mode mode; 9265 int regno; 9266 const char *p; 9267 9268 op_alt_regno[i] = (int *) alloca (recog_n_alternatives * sizeof (int)); 9269 for (j = 0; j < recog_n_alternatives; j++) 9270 op_alt_regno[i][j] = -1; 9271 9272 p = constraints[i] = recog_constraints[i]; 9273 mode = recog_operand_mode[i]; 9274 9275 /* Add the reject values for each alternative given by the constraints 9276 for this operand. */ 9277 j = 0; 9278 while (*p != '\0') 9279 { 9280 char c = *p++; 9281 if (c == ',') 9282 j++; 9283 else if (c == '?') 9284 alternative_reject[j] += 3; 9285 else if (c == '!') 9286 alternative_reject[j] += 300; 9287 } 9288 9289 /* We won't change operands which are already registers. We 9290 also don't want to modify output operands. */ 9291 regno = true_regnum (recog_operand[i]); 9292 if (regno >= 0 9293 || constraints[i][0] == '=' 9294 || constraints[i][0] == '+') 9295 continue; 9296 9297 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 9298 { 9299 int class = (int) NO_REGS; 9300 9301 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode)) 9302 continue; 9303 9304 REGNO (reg) = regno; 9305 PUT_MODE (reg, mode); 9306 9307 /* We found a register equal to this operand. Now look for all 9308 alternatives that can accept this register and have not been 9309 assigned a register they can use yet. 
*/ 9310 j = 0; 9311 p = constraints[i]; 9312 for (;;) 9313 { 9314 char c = *p++; 9315 9316 switch (c) 9317 { 9318 case '=': case '+': case '?': 9319 case '#': case '&': case '!': 9320 case '*': case '%': 9321 case '0': case '1': case '2': case '3': case '4': 9322 case 'm': case '<': case '>': case 'V': case 'o': 9323 case 'E': case 'F': case 'G': case 'H': 9324 case 's': case 'i': case 'n': 9325 case 'I': case 'J': case 'K': case 'L': 9326 case 'M': case 'N': case 'O': case 'P': 9327#ifdef EXTRA_CONSTRAINT 9328 case 'Q': case 'R': case 'S': case 'T': case 'U': 9329#endif 9330 case 'p': case 'X': 9331 /* These don't say anything we care about. */ 9332 break; 9333 9334 case 'g': case 'r': 9335 class = reg_class_subunion[(int) class][(int) GENERAL_REGS]; 9336 break; 9337 9338 default: 9339 class 9340 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)]; 9341 break; 9342 9343 case ',': case '\0': 9344 /* See if REGNO fits this alternative, and set it up as the 9345 replacement register if we don't have one for this 9346 alternative yet and the operand being replaced is not 9347 a cheap CONST_INT. */ 9348 if (op_alt_regno[i][j] == -1 9349 && reg_fits_class_p (reg, class, 0, mode) 9350 && (GET_CODE (recog_operand[i]) != CONST_INT 9351 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET))) 9352 { 9353 alternative_nregs[j]++; 9354 op_alt_regno[i][j] = regno; 9355 } 9356 j++; 9357 break; 9358 } 9359 9360 if (c == '\0') 9361 break; 9362 } 9363 } 9364 } 9365 9366 /* Record all alternatives which are better or equal to the currently 9367 matching one in the alternative_order array. */ 9368 for (i = j = 0; i < recog_n_alternatives; i++) 9369 if (alternative_reject[i] <= alternative_reject[which_alternative]) 9370 alternative_order[j++] = i; 9371 recog_n_alternatives = j; 9372 9373 /* Sort it. Given a small number of alternatives, a dumb algorithm 9374 won't hurt too much. 
*/ 9375 for (i = 0; i < recog_n_alternatives - 1; i++) 9376 { 9377 int best = i; 9378 int best_reject = alternative_reject[alternative_order[i]]; 9379 int best_nregs = alternative_nregs[alternative_order[i]]; 9380 int tmp; 9381 9382 for (j = i + 1; j < recog_n_alternatives; j++) 9383 { 9384 int this_reject = alternative_reject[alternative_order[j]]; 9385 int this_nregs = alternative_nregs[alternative_order[j]]; 9386 9387 if (this_reject < best_reject 9388 || (this_reject == best_reject && this_nregs < best_nregs)) 9389 { 9390 best = j; 9391 best_reject = this_reject; 9392 best_nregs = this_nregs; 9393 } 9394 } 9395 9396 tmp = alternative_order[best]; 9397 alternative_order[best] = alternative_order[i]; 9398 alternative_order[i] = tmp; 9399 } 9400 9401 /* Substitute the operands as determined by op_alt_regno for the best 9402 alternative. */ 9403 j = alternative_order[0]; 9404 9405 /* Pop back to the real obstacks while changing the insn. */ 9406 pop_obstacks (); 9407 9408 for (i = 0; i < recog_n_operands; i++) 9409 { 9410 enum machine_mode mode = recog_operand_mode[i]; 9411 if (op_alt_regno[i][j] == -1) 9412 continue; 9413 9414 validate_change (insn, recog_operand_loc[i], 9415 gen_rtx_REG (mode, op_alt_regno[i][j]), 1); 9416 } 9417 9418 for (i = recog_n_dups - 1; i >= 0; i--) 9419 { 9420 int op = recog_dup_num[i]; 9421 enum machine_mode mode = recog_operand_mode[op]; 9422 9423 if (op_alt_regno[op][j] == -1) 9424 continue; 9425 9426 validate_change (insn, recog_dup_loc[i], 9427 gen_rtx_REG (mode, op_alt_regno[op][j]), 1); 9428 } 9429 9430 /* Go back to the obstack we are using for temporary 9431 storage. */ 9432 push_obstacks (&reload_obstack, &reload_obstack); 9433 9434 return apply_change_group (); 9435#else 9436 return 0; 9437#endif 9438} 9439 9440/* These two variables are used to pass information from 9441 reload_cse_record_set to reload_cse_check_clobber. 
*/ 9442 9443static int reload_cse_check_clobbered; 9444static rtx reload_cse_check_src; 9445 9446/* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set 9447 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The 9448 second argument, which is passed by note_stores, is ignored. */ 9449 9450static void 9451reload_cse_check_clobber (dest, ignore) 9452 rtx dest; 9453 rtx ignore ATTRIBUTE_UNUSED; 9454{ 9455 if (reg_overlap_mentioned_p (dest, reload_cse_check_src)) 9456 reload_cse_check_clobbered = 1; 9457} 9458 9459/* Record the result of a SET instruction. SET is the set pattern. 9460 BODY is the pattern of the insn that it came from. */ 9461 9462static void 9463reload_cse_record_set (set, body) 9464 rtx set; 9465 rtx body; 9466{ 9467 rtx dest, src, x; 9468 int dreg, sreg; 9469 enum machine_mode dest_mode; 9470 9471 dest = SET_DEST (set); 9472 src = SET_SRC (set); 9473 dreg = true_regnum (dest); 9474 sreg = true_regnum (src); 9475 dest_mode = GET_MODE (dest); 9476 9477 /* Some machines don't define AUTO_INC_DEC, but they still use push 9478 instructions. We need to catch that case here in order to 9479 invalidate the stack pointer correctly. Note that invalidating 9480 the stack pointer is different from invalidating DEST. */ 9481 x = dest; 9482 while (GET_CODE (x) == SUBREG 9483 || GET_CODE (x) == ZERO_EXTRACT 9484 || GET_CODE (x) == SIGN_EXTRACT 9485 || GET_CODE (x) == STRICT_LOW_PART) 9486 x = XEXP (x, 0); 9487 if (push_operand (x, GET_MODE (x))) 9488 { 9489 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX); 9490 reload_cse_invalidate_rtx (dest, NULL_RTX); 9491 return; 9492 } 9493 9494 /* We can only handle an assignment to a register, or a store of a 9495 register to a memory location. For other cases, we just clobber 9496 the destination. We also have to just clobber if there are side 9497 effects in SRC or DEST. 
*/ 9498 if ((dreg < 0 && GET_CODE (dest) != MEM) 9499 || side_effects_p (src) 9500 || side_effects_p (dest)) 9501 { 9502 reload_cse_invalidate_rtx (dest, NULL_RTX); 9503 return; 9504 } 9505 9506#ifdef HAVE_cc0 9507 /* We don't try to handle values involving CC, because it's a pain 9508 to keep track of when they have to be invalidated. */ 9509 if (reg_mentioned_p (cc0_rtx, src) 9510 || reg_mentioned_p (cc0_rtx, dest)) 9511 { 9512 reload_cse_invalidate_rtx (dest, NULL_RTX); 9513 return; 9514 } 9515#endif 9516 9517 /* If BODY is a PARALLEL, then we need to see whether the source of 9518 SET is clobbered by some other instruction in the PARALLEL. */ 9519 if (GET_CODE (body) == PARALLEL) 9520 { 9521 int i; 9522 9523 for (i = XVECLEN (body, 0) - 1; i >= 0; --i) 9524 { 9525 rtx x; 9526 9527 x = XVECEXP (body, 0, i); 9528 if (x == set) 9529 continue; 9530 9531 reload_cse_check_clobbered = 0; 9532 reload_cse_check_src = src; 9533 note_stores (x, reload_cse_check_clobber); 9534 if (reload_cse_check_clobbered) 9535 { 9536 reload_cse_invalidate_rtx (dest, NULL_RTX); 9537 return; 9538 } 9539 } 9540 } 9541 9542 if (dreg >= 0) 9543 { 9544 int i; 9545 9546 /* This is an assignment to a register. Update the value we 9547 have stored for the register. */ 9548 if (sreg >= 0) 9549 { 9550 rtx x; 9551 9552 /* This is a copy from one register to another. Any values 9553 which were valid for SREG are now valid for DREG. If the 9554 mode changes, we use gen_lowpart_common to extract only 9555 the part of the value that is copied. 
*/ 9556 reg_values[dreg] = 0; 9557 for (x = reg_values[sreg]; x; x = XEXP (x, 1)) 9558 { 9559 rtx tmp; 9560 9561 if (XEXP (x, 0) == 0) 9562 continue; 9563 if (dest_mode == GET_MODE (XEXP (x, 0))) 9564 tmp = XEXP (x, 0); 9565 else if (GET_MODE_BITSIZE (dest_mode) 9566 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) 9567 continue; 9568 else 9569 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0)); 9570 if (tmp) 9571 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp, 9572 reg_values[dreg]); 9573 } 9574 } 9575 else 9576 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX); 9577 9578 /* We've changed DREG, so invalidate any values held by other 9579 registers that depend upon it. */ 9580 reload_cse_invalidate_regno (dreg, dest_mode, 0); 9581 9582 /* If this assignment changes more than one hard register, 9583 forget anything we know about the others. */ 9584 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++) 9585 reg_values[dreg + i] = 0; 9586 } 9587 else if (GET_CODE (dest) == MEM) 9588 { 9589 /* Invalidate conflicting memory locations. */ 9590 reload_cse_invalidate_mem (dest); 9591 9592 /* If we're storing a register to memory, add DEST to the list 9593 in REG_VALUES. */ 9594 if (sreg >= 0 && ! side_effects_p (dest)) 9595 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest, 9596 reg_values[sreg]); 9597 } 9598 else 9599 { 9600 /* We should have bailed out earlier. */ 9601 abort (); 9602 } 9603} 9604 9605/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg 9606 addressing now. 9607 This code might also be useful when reload gave up on reg+reg addresssing 9608 because of clashes between the return register and INDEX_REG_CLASS. */ 9609 9610/* The maximum number of uses of a register we can keep track of to 9611 replace them with reg+reg addressing. */ 9612#define RELOAD_COMBINE_MAX_USES 6 9613 9614/* INSN is the insn where a register has ben used, and USEP points to the 9615 location of the register within the rtl. 
   */
/* One recorded use of a register: the insn containing the use, and the
   location within that insn's rtl where the register appears (so it can
   be replaced in place via validate_change).  */
struct reg_use { rtx insn, *usep; };

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it becomes live again.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards),
   OFFSET contains the constant offset that is added to the register in
   all encountered uses, and USE_RUID indicates the first encountered, i.e.
   last, of these uses.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    int use_index;
    rtx offset;
    int store_ruid;
    int use_ruid;
  } reg_state[FIRST_PSEUDO_REGISTER];

/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* Map a CODE_LABEL to its slot in the LABEL_LIVE array (a local of
   reload_combine); MIN_LABELNO is also a local of reload_combine.  */
#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])

/* Scan the insn stream backwards looking for
	(set (REGX) (CONST_INT))
	(set (REGX) (PLUS (REGX) (REGY)))
   where all remaining uses of REGX are in memory addresses, and rewrite
   it to load the constant into a free index register REGZ and replace
   those uses with (PLUS (REGZ) (REGY)).  The per-register use/store
   tracking lives in reg_state[], maintained by reload_combine_note_use
   and reload_combine_note_store.  */
static void
reload_combine ()
{
  rtx insn, set;
  int first_index_reg = 1, last_index_reg = 0;
  int i;
  int last_label_ruid;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* If reg+reg can be used in offsetable memory addresses, the main chunk of
     reload has already used it where appropriate, so there is no use in
     trying to generate it now.  */
  if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
    return;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
    {
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
	{
	  /* Scanning downwards, so the first hit is the largest regno.  */
	  if (! last_index_reg)
	    last_index_reg = i;
	  first_index_reg = i;
	}
    }
  /* If no index register is available, we can quit now.  Initial values
     of first_index_reg (1) and last_index_reg (0) guarantee this test
     fires when the loop above found nothing.  */
  if (first_index_reg > last_index_reg)
    return;

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
  CLEAR_HARD_REG_SET (ever_live_at_start);
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      insn = BLOCK_HEAD (i);
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  HARD_REG_SET live;

	  REG_SET_TO_HARD_REG_SET (live, BASIC_BLOCK (i)->global_live_at_start);
	  compute_use_by_pseudos (&live, BASIC_BLOCK (i)->global_live_at_start);
	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
	  IOR_HARD_REG_SET (ever_live_at_start, live);
	}
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = reload_combine_ruid = 0;
  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
    {
      reg_state[i].store_ruid = reload_combine_ruid;
      if (fixed_regs[i])
	reg_state[i].use_index = -1;
      else
	reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      rtx note;

      /* We cannot do our optimization across labels.  Invalidating all the use
	 information we have would be costly, so we just note where the label
	 is and then later disable any optimization that would cross it.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_label_ruid = reload_combine_ruid;
      if (GET_CODE (insn) == BARRIER)
	{
	  /* After a barrier, every register is dead as far as this scan
	     is concerned.  */
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
	    reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	}
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;
      reload_combine_ruid++;

      /* Look for (set (REGX) (CONST_INT))
		  (set (REGX) (PLUS (REGX) (REGY)))
		  ...
		  ... (MEM (REGX)) ...
	 and convert it to
		  (set (REGZ) (CONST_INT))
		  ...
		  ... (MEM (PLUS (REGZ) (REGY)))... .

	 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
	 and that we know all uses of REGX before it dies.  */
      set = single_set (insn);
      if (set != NULL_RTX
	  && GET_CODE (SET_DEST (set)) == REG
	  && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
				GET_MODE (SET_DEST (set)))
	      == 1)
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
	  && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
	  && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
	{
	  rtx reg = SET_DEST (set);
	  rtx plus = SET_SRC (set);
	  rtx base = XEXP (plus, 1);
	  rtx prev = prev_nonnote_insn (insn);
	  rtx prev_set = prev ? single_set (prev) : NULL_RTX;
	  int regno = REGNO (reg);
	  rtx const_reg;
	  rtx reg_sum = NULL_RTX;

	  /* Now, we need an index register.
	     We'll set index_reg to this index register, const_reg to the
	     register that is to be loaded with the constant
	     (denoted as REGZ in the substitution illustration above),
	     and reg_sum to the register-register that we want to use to
	     substitute uses of REG (typically in MEMs) with.
	     First check REG and BASE for being index registers;
	     we can use them even if they are not dead.  */
	  if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
	      || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
				    REGNO (base)))
	    {
	      const_reg = reg;
	      reg_sum = plus;
	    }
	  else
	    {
	      /* Otherwise, look for a free index register.  Since we have
		 checked above that neither REG nor BASE are index registers,
		 if we find anything at all, it will be different from these
		 two registers.  */
	      for (i = first_index_reg; i <= last_index_reg; i++)
		{
		  if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
		      && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
		      && reg_state[i].store_ruid <= reg_state[regno].use_ruid
		      && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
		    {
		      rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
		      const_reg = index_reg;
		      reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
		      break;
		    }
		}
	    }
	  /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
	     (REGY), i.e. BASE, is not clobbered before the last use we'll
	     create.  Note that REG_SUM remains NULL_RTX (and blocks the
	     transformation) if no suitable index register was found above;
	     CONST_REG is only read when REG_SUM is non-null.  */
	  if (prev_set
	      && GET_CODE (SET_SRC (prev_set)) == CONST_INT
	      && rtx_equal_p (SET_DEST (prev_set), reg)
	      && reg_state[regno].use_index >= 0
	      && reg_state[REGNO (base)].store_ruid <= reg_state[regno].use_ruid
	      && reg_sum)
	    {
	      int i;	/* Shadows the outer I deliberately.  */

	      /* Change destination register and - if necessary - the
		 constant value in PREV, the constant loading instruction.  */
	      validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
	      if (reg_state[regno].offset != const0_rtx)
		validate_change (prev,
				 &SET_SRC (prev_set),
				 GEN_INT (INTVAL (SET_SRC (prev_set))
					  + INTVAL (reg_state[regno].offset)),
				 1);
	      /* Now for every use of REG that we have recorded, replace REG
		 with REG_SUM.  */
	      for (i = reg_state[regno].use_index;
		   i < RELOAD_COMBINE_MAX_USES; i++)
		validate_change (reg_state[regno].reg_use[i].insn,
				 reg_state[regno].reg_use[i].usep,
				 reg_sum, 1);

	      /* All the changes above were queued (last arg 1); they are
		 applied, or discarded, as a group here.  */
	      if (apply_change_group ())
		{
		  rtx *np;

		  /* Delete the reg-reg addition.  */
		  PUT_CODE (insn, NOTE);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  NOTE_SOURCE_FILE (insn) = 0;

		  if (reg_state[regno].offset != const0_rtx)
		    {
		      /* Previous REG_EQUIV / REG_EQUAL notes for PREV
			 are now invalid.  */
		      for (np = &REG_NOTES (prev); *np; )
			{
			  if (REG_NOTE_KIND (*np) == REG_EQUAL
			      || REG_NOTE_KIND (*np) == REG_EQUIV)
			    *np = XEXP (*np, 1);
			  else
			    np = &XEXP (*np, 1);
			}
		    }
		  reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
		  reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
		  continue;
		}
	    }
	}
      note_stores (PATTERN (insn), reload_combine_note_store);
      if (GET_CODE (insn) == CALL_INSN)
	{
	  rtx link;

	  /* A call clobbers all call-used registers.  */
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
	    {
	      if (call_used_regs[i])
		{
		  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
		  reg_state[i].store_ruid = reload_combine_ruid;
		}
	    }
	  /* Registers mentioned in CALL_INSN_FUNCTION_USAGE are either
	     clobbered (treated as a store) or used in some unknown
	     fashion (use_index = -1).  */
	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx use = XEXP (link, 0);
	      int regno = REGNO (XEXP (use, 0));
	      if (GET_CODE (use) == CLOBBER)
		{
		  reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
		  reg_state[regno].store_ruid = reload_combine_ruid;
		}
	      else
		reg_state[regno].use_index = -1;
	    }
	}
      if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) != RETURN)
	{
	  /* Non-spill registers might be used at the call destination in
	     some unknown fashion, so we have to mark the unknown use.  */
	  HARD_REG_SET *live;
	  if ((condjump_p (insn) || condjump_in_parallel_p (insn))
	      && JUMP_LABEL (insn))
	    live = &LABEL_LIVE (JUMP_LABEL (insn));
	  else
	    live = &ever_live_at_start;
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
	    {
	      if (TEST_HARD_REG_BIT (*live, i))
		reg_state[i].use_index = -1;
	    }
	}
      reload_combine_note_use (&PATTERN (insn), insn);
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  /* An auto-increment side effect both uses and sets the
	     register; treat it as used in an unknown fashion.  */
	  if (REG_NOTE_KIND (note) == REG_INC
	      && GET_CODE (XEXP (note, 0)) == REG)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      reg_state[regno].store_ruid = reload_combine_ruid;
	      reg_state[regno].use_index = -1;
	    }
	}
    }
  free (label_live);
}

/* Check if DST is a register or a subreg of a register; if it is,
   update reg_state[regno].store_ruid and reg_state[regno].use_index
   accordingly.  Called via note_stores from reload_combine.
   SET is the SET or CLOBBER rtx that stores into DST.  */
static void
reload_combine_note_store (dst, set)
     rtx dst, set;
{
  int regno = 0;
  int i;
  unsigned size = GET_MODE_SIZE (GET_MODE (dst));

  if (GET_CODE (dst) == SUBREG)
    {
      regno = SUBREG_WORD (dst);
      dst = SUBREG_REG (dst);
    }
  if (GET_CODE (dst) != REG)
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.

     Similarly for ZERO_EXTRACT and SIGN_EXTRACT.  */
  if (GET_CODE (set) != SET
      || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      /* Partial or unknown store: the old value partly survives, so mark
	 every covered hard reg as used in an unknown fashion.  */
      for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
	{
	  reg_state[i].use_index = -1;
	  reg_state[i].store_ruid = reload_combine_ruid;
	}
    }
  else
    {
      /* Full store: the register is dead above this point (we scan
	 backwards), so recorded uses can be discarded.  */
      for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
	{
	  reg_state[i].store_ruid = reload_combine_ruid;
	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	}
    }
}

/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
static void
reload_combine_note_use (xp, insn)
     rtx *xp, insn;
{
  rtx x = *xp;
  enum rtx_code code = x->code;	/* NOTE(review): direct field access; GET_CODE (x) is used elsewhere.  */
  char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      /* A SET of a plain register is a store, not a use; only scan the
	 source side.  (Stores themselves go through note_store.)  */
      if (GET_CODE (SET_DEST (x)) == REG)
	{
	  reload_combine_note_use (&SET_SRC (x), insn);
	  return;
	}
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (GET_CODE (XEXP (x, 0)) == REG && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
	{
	  /* Mark the return register as used in an unknown fashion.  */
	  rtx reg = XEXP (x, 0);
	  int regno = REGNO (reg);
	  int nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg));

	  while (--nregs >= 0)
	    reg_state[regno + nregs].use_index = -1;
	  return;
	}
      break;

    case CLOBBER:
      /* A clobbered register is not a use.  */
      if (GET_CODE (SET_DEST (x)) == REG)
	return;
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)); record the offset
	 and treat the inner reg as the use.  */
      if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
	int regno = REGNO (x);
	int use_index;
	int nregs;

	/* Some spurious USEs of pseudo registers might remain.
	   Just ignore them.  */
	if (regno >= FIRST_PSEUDO_REGISTER)
	  return;

	nregs = HARD_REGNO_NREGS (regno, GET_MODE (x));

	/* We can't substitute into multi-hard-reg uses.  */
	if (nregs > 1)
	  {
	    while (--nregs >= 0)
	      reg_state[regno + nregs].use_index = -1;
	    return;
	  }

	/* If this register is already used in some unknown fashion, we
	   can't do anything.
	   If we decrement the index from zero to -1, we can't store more
	   uses, so this register becomes used in an unknown fashion.  */
	use_index = --reg_state[regno].use_index;
	if (use_index < 0)
	  return;

	if (use_index != RELOAD_COMBINE_MAX_USES - 1)
	  {
	    /* We have found another use for a register that is already
	       used later.  Check if the offsets match; if not, mark the
	       register as used in an unknown fashion.  */
	    if (! rtx_equal_p (offset, reg_state[regno].offset))
	      {
		reg_state[regno].use_index = -1;
		return;
	      }
	  }
	else
	  {
	    /* This is the first use of this register we have seen since we
	       marked it as dead.  */
	    reg_state[regno].offset = offset;
	    reg_state[regno].use_ruid = reload_combine_ruid;
	  }
	reg_state[regno].reg_use[use_index].insn = insn;
	reg_state[regno].reg_use[use_index].usep = xp;
	return;
      }

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	reload_combine_note_use (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    reload_combine_note_use (&XVECEXP (x, i, j), insn);
	}
    }
}

/* See if we can reduce the cost of a constant by replacing a move with
   an add.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use last_label_luid (local variable of reload_cse_move2add) to note
   where the label is and then later disable any optimization that would
   cross it.
   reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
   reg_set_luid[n] is larger than last_label_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];
/* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
   reg_mode[n] to be valid.
   If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
   has been set to reg_offset[n] in mode reg_mode[n] .
   If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
   register n has been set to the sum of reg_offset[n] and register
   reg_base_reg[n], calculated in mode reg_mode[n] .
   (Storing any other kind of rtx into reg_offset[n] acts as an
   invalidation, since the CONST_INT check above then fails.)  */
static rtx reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* Generate a CONST_INT and force it in the range of MODE.
   */
static rtx
gen_mode_int (mode, value)
     enum machine_mode mode;
     HOST_WIDE_INT value;
{
  /* Mask VALUE down to the precision of MODE ...  */
  HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
  int width = GET_MODE_BITSIZE (mode);

  /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
     sign extend it.  */
  if (width > 0 && width < HOST_BITS_PER_WIDE_INT
      && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
    cval |= (HOST_WIDE_INT) -1 << width;

  return GEN_INT (cval);
}

/* Scan the insns from FIRST onwards, replacing a constant load by a
   cheaper add to the register's previous known contents where possible.
   The known contents are tracked in reg_set_luid / reg_offset /
   reg_base_reg / reg_mode (see their comments above), which are updated
   via move2add_note_store.  */
static void
reload_cse_move2add (first)
     rtx first;
{
  int i;
  rtx insn;
  int last_label_luid;

  for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
    reg_set_luid[i] = 0;

  last_label_luid = 0;
  move2add_luid = 1;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (GET_CODE (insn) == CODE_LABEL)
	last_label_luid = move2add_luid;
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
	 straightforward SETs.  */
      if (GET_CODE (pat) == SET
	  && GET_CODE (SET_DEST (pat)) == REG)
	{
	  rtx reg = SET_DEST (pat);
	  int regno = REGNO (reg);
	  rtx src = SET_SRC (pat);

	  /* Check if we have valid information on the contents of this
	     register in the mode of REG.  (A non-CONST_INT reg_offset
	     means the contents were invalidated.)  */
	  /* ??? We don't know how zero / sign extension is handled, hence
	     we can't go from a narrower to a wider mode.  */
	  if (reg_set_luid[regno] > last_label_luid
	      && (GET_MODE_SIZE (GET_MODE (reg))
		  <= GET_MODE_SIZE (reg_mode[regno]))
	      && GET_CODE (reg_offset[regno]) == CONST_INT)
	    {
	      /* Try to transform (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (CONST_INT B))
		 to
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */

	      if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
		{
		  int success = 0;
		  rtx new_src
		    = gen_mode_int (GET_MODE (reg),
				    INTVAL (src) - INTVAL (reg_offset[regno]));
		  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
		     use (set (reg) (reg)) instead.
		     We don't delete this insn, nor do we convert it into a
		     note, to avoid losing register notes or the return
		     value flag.  jump2 already knows how to get rid of
		     no-op moves.  */
		  if (new_src == const0_rtx)
		    success = validate_change (insn, &SET_SRC (pat), reg, 0);
		  else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
			   && have_add2_insn (GET_MODE (reg)))
		    success = validate_change (insn, &PATTERN (insn),
					       gen_add2_insn (reg, new_src), 0);
		  /* Whether or not the change succeeded, REG now holds SRC;
		     record that.  */
		  reg_set_luid[regno] = move2add_luid;
		  reg_mode[regno] = GET_MODE (reg);
		  reg_offset[regno] = src;
		  continue;
		}

	      /* Try to transform (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT B)))
		 to
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
	      else if (GET_CODE (src) == REG
		       && reg_base_reg[regno] == REGNO (src)
		       && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
		{
		  rtx next = next_nonnote_insn (insn);
		  rtx set;	/* Only read below when NEXT is non-null.  */
		  if (next)
		    set = single_set (next);
		  if (next
		      && set
		      && SET_DEST (set) == reg
		      && GET_CODE (SET_SRC (set)) == PLUS
		      && XEXP (SET_SRC (set), 0) == reg
		      && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
		    {
		      rtx src3 = XEXP (SET_SRC (set), 1);
		      rtx new_src
			= gen_mode_int (GET_MODE (reg),
					INTVAL (src3)
					- INTVAL (reg_offset[regno]));
		      int success = 0;

		      if (new_src == const0_rtx)
			/* See above why we create (set (reg) (reg)) here.  */
			success
			  = validate_change (next, &SET_SRC (set), reg, 0);
		      else if ((rtx_cost (new_src, PLUS)
				< 2 + rtx_cost (src3, SET))
			       && have_add2_insn (GET_MODE (reg)))
			success
			  = validate_change (next, &PATTERN (next),
					     gen_add2_insn (reg, new_src), 0);
		      if (success)
			{
			  /* INSN might be the first insn in a basic block
			     if the preceding insn is a conditional jump
			     or a possible-throwing call.  */
			  PUT_CODE (insn, NOTE);
			  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (insn) = 0;
			}
		      /* Skip NEXT; it has been handled here.  */
		      insn = next;
		      reg_set_luid[regno] = move2add_luid;
		      reg_mode[regno] = GET_MODE (reg);
		      reg_offset[regno] = src3;
		      continue;
		    }
		}
	    }
	}

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC
	      && GET_CODE (XEXP (note, 0)) == REG)
	    {
	      /* Indicate that this register has been recently written to,
		 but the exact contents are not available.  (NOTE is not a
		 CONST_INT, so storing it invalidates the entry.)  */
	      int regno = REGNO (XEXP (note, 0));
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  reg_set_luid[regno] = move2add_luid;
		  reg_offset[regno] = note;
		}
	    }
	}
      note_stores (PATTERN (insn), move2add_note_store);
      /* If this is a CALL_INSN, all call used registers are stored with
	 unknown values.  */
      if (GET_CODE (insn) == CALL_INSN)
	{
	  for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
	    {
	      if (call_used_regs[i])
		{
		  reg_set_luid[i] = move2add_luid;
		  reg_offset[i] = insn;	/* Invalidate contents.  */
		}
	    }
	}
    }
}

/* SET is a SET or CLOBBER that sets DST.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */
static void
move2add_note_store (dst, set)
     rtx dst, set;
{
  int regno = 0;
  int i;

  enum machine_mode mode = GET_MODE (dst);
  if (GET_CODE (dst) == SUBREG)
    {
      regno = SUBREG_WORD (dst);
      dst = SUBREG_REG (dst);
    }
  if (GET_CODE (dst) != REG)
    return;

  regno += REGNO (dst);

  /* Only track full single-hard-reg SETs; anything partial
     (ZERO_EXTRACT / SIGN_EXTRACT / STRICT_LOW_PART), multi-reg, or a
     CLOBBER just invalidates the covered registers below.  */
  if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);

      reg_mode[regno] = mode;
      switch (GET_CODE (src))
	{
	case PLUS:
	  {
	    rtx src0 = XEXP (src, 0);
	    if (GET_CODE (src0) == REG)
	      {
		/* (set (REGX) (plus (REGY) (...))): REGX is now based on
		   REGY.  If REGX was already known as REGX + 0, keep the
		   old base so chained additions accumulate.  */
		if (REGNO (src0) != regno
		    || reg_offset[regno] != const0_rtx)
		  {
		    reg_base_reg[regno] = REGNO (src0);
		    reg_set_luid[regno] = move2add_luid;
		  }
		reg_offset[regno] = XEXP (src, 1);
		break;
	      }
	    reg_set_luid[regno] = move2add_luid;
	    reg_offset[regno] = set;	/* Invalidate contents.  */
	    break;
	  }

	case REG:
	  /* Straight register copy: known as base + 0.  */
	  reg_base_reg[regno] = REGNO (SET_SRC (set));
	  reg_offset[regno] = const0_rtx;
	  reg_set_luid[regno] = move2add_luid;
	  break;

	default:
	  /* Anything else (e.g. a CONST_INT load): no base register.  */
	  reg_base_reg[regno] = -1;
	  reg_offset[regno] = SET_SRC (set);
	  reg_set_luid[regno] = move2add_luid;
	  break;
	}
    }
  else
    {
      for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
	{
	  /* Indicate that this register has been recently written to,
	     but the exact contents are not available.  */
	  reg_set_luid[i] = move2add_luid;
	  reg_offset[i] = dst;
	}
    }
}

#ifdef AUTO_INC_DEC
/* Recursively scan X (part of INSN) and attach a REG_INC note to INSN
   for every auto-increment/decrement address found inside a MEM.  */
static void
add_auto_inc_notes (insn, x)
     rtx insn;
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  char *fmt;
  int i, j;

  if (code == MEM && auto_inc_p (XEXP (x, 0)))
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
      return;
    }

  /* Scan all the operand sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_auto_inc_notes (insn, XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
    }
}
#endif
