1/* Code for RTL transformations to satisfy insn constraints.
2   Copyright (C) 2010-2015 Free Software Foundation, Inc.
3   Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5   This file is part of GCC.
6
7   GCC is free software; you can redistribute it and/or modify it under
8   the terms of the GNU General Public License as published by the Free
9   Software Foundation; either version 3, or (at your option) any later
10   version.
11
12   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13   WARRANTY; without even the implied warranty of MERCHANTABILITY or
14   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15   for more details.
16
17   You should have received a copy of the GNU General Public License
18   along with GCC; see the file COPYING3.  If not see
19   <http://www.gnu.org/licenses/>.  */
20
21
22/* This file contains code for 3 passes: constraint pass,
23   inheritance/split pass, and pass for undoing failed inheritance and
24   split.
25
26   The major goal of constraint pass is to transform RTL to satisfy
27   insn and address constraints by:
28     o choosing insn alternatives;
29     o generating *reload insns* (or reloads in brief) and *reload
30       pseudos* which will get necessary hard registers later;
31     o substituting pseudos with equivalent values and removing the
32       instructions that initialized those pseudos.
33
   The constraint pass has the biggest and most complicated code in LRA.
35   There are a lot of important details like:
36     o reuse of input reload pseudos to simplify reload pseudo
37       allocations;
38     o some heuristics to choose insn alternative to improve the
39       inheritance;
40     o early clobbers etc.
41
42   The pass is mimicking former reload pass in alternative choosing
43   because the reload pass is oriented to current machine description
44   model.  It might be changed if the machine description model is
45   changed.
46
47   There is special code for preventing all LRA and this pass cycling
48   in case of bugs.
49
50   On the first iteration of the pass we process every instruction and
51   choose an alternative for each one.  On subsequent iterations we try
52   to avoid reprocessing instructions if we can be sure that the old
53   choice is still valid.
54
   The inheritance/split pass transforms the code to achieve
   inheritance and live range splitting.  It is done on a backward
   traversal of EBBs.
58
59   The inheritance optimization goal is to reuse values in hard
60   registers. There is analogous optimization in old reload pass.  The
61   inheritance is achieved by following transformation:
62
63       reload_p1 <- p	     reload_p1 <- p
64       ...		     new_p <- reload_p1
65       ...		=>   ...
66       reload_p2 <- p	     reload_p2 <- new_p
67
68   where p is spilled and not changed between the insns.  Reload_p1 is
69   also called *original pseudo* and new_p is called *inheritance
70   pseudo*.
71
72   The subsequent assignment pass will try to assign the same (or
73   another if it is not possible) hard register to new_p as to
74   reload_p1 or reload_p2.
75
76   If the assignment pass fails to assign a hard register to new_p,
77   this file will undo the inheritance and restore the original code.
78   This is because implementing the above sequence with a spilled
79   new_p would make the code much worse.  The inheritance is done in
80   EBB scope.  The above is just a simplified example to get an idea
81   of the inheritance as the inheritance is also done for non-reload
82   insns.
83
84   Splitting (transformation) is also done in EBB scope on the same
85   pass as the inheritance:
86
87       r <- ... or ... <- r		 r <- ... or ... <- r
88       ...				 s <- r (new insn -- save)
89       ...			  =>
90       ...				 r <- s (new insn -- restore)
91       ... <- r				 ... <- r
92
93    The *split pseudo* s is assigned to the hard register of the
94    original pseudo or hard register r.
95
96    Splitting is done:
97      o In EBBs with high register pressure for global pseudos (living
98	in at least 2 BBs) and assigned to hard registers when there
	are more than one reload needing the hard registers;
100      o for pseudos needing save/restore code around calls.
101
102    If the split pseudo still has the same hard register as the
103    original pseudo after the subsequent assignment pass or the
104    original pseudo was split, the opposite transformation is done on
105    the same pass for undoing inheritance.  */
106
107#undef REG_OK_STRICT
108
109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "hard-reg-set.h"
114#include "rtl.h"
115#include "tm_p.h"
116#include "regs.h"
117#include "insn-config.h"
118#include "insn-codes.h"
119#include "recog.h"
120#include "output.h"
121#include "addresses.h"
122#include "target.h"
123#include "hashtab.h"
124#include "hash-set.h"
125#include "vec.h"
126#include "machmode.h"
127#include "input.h"
128#include "function.h"
129#include "symtab.h"
130#include "flags.h"
131#include "statistics.h"
132#include "double-int.h"
133#include "real.h"
134#include "fixed-value.h"
135#include "alias.h"
136#include "wide-int.h"
137#include "inchash.h"
138#include "tree.h"
139#include "expmed.h"
140#include "dojump.h"
141#include "explow.h"
142#include "calls.h"
143#include "emit-rtl.h"
144#include "varasm.h"
145#include "stmt.h"
146#include "expr.h"
147#include "predict.h"
148#include "dominance.h"
149#include "cfg.h"
150#include "cfgrtl.h"
151#include "basic-block.h"
152#include "except.h"
153#include "optabs.h"
154#include "df.h"
155#include "ira.h"
156#include "rtl-error.h"
157#include "params.h"
158#include "lra-int.h"
159
/* Value of LRA_CURR_RELOAD_NUM at the beginning of the BB of the
   current insn.  Remember that LRA_CURR_RELOAD_NUM is the number of
   emitted reload insns.  */
static int bb_reload_num;

/* The current insn being processed and its corresponding single set
   (NULL otherwise), and its data: the containing basic block, the
   insn recog data, the insn static data, and the mode of each
   operand.  */
static rtx_insn *curr_insn;
static rtx curr_insn_set;
static basic_block curr_bb;
static lra_insn_recog_data_t curr_id;
static struct lra_static_insn_data *curr_static_id;
static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
/* Mode of the register substituted by its equivalence with VOIDmode
   (e.g. constant) and whose subreg is the given operand of the
   current insn.  VOIDmode in all other cases.  */
static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];



/* Start numbers for new registers and insns at the current constraints
   pass start.  Pseudos/insns at or above these numbers were created
   by the current pass (i.e. are reload pseudos / reload insns).  */
static int new_regno_start;
static int new_insn_uid_start;
185
186/* If LOC is nonnull, strip any outer subreg from it.  */
187static inline rtx *
188strip_subreg (rtx *loc)
189{
190  return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
191}
192
193/* Return hard regno of REGNO or if it is was not assigned to a hard
194   register, use a hard register from its allocno class.  */
195static int
196get_try_hard_regno (int regno)
197{
198  int hard_regno;
199  enum reg_class rclass;
200
201  if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
202    hard_regno = lra_get_regno_hard_regno (regno);
203  if (hard_regno >= 0)
204    return hard_regno;
205  rclass = lra_get_allocno_class (regno);
206  if (rclass == NO_REGS)
207    return -1;
208  return ira_class_hard_regs[rclass][0];
209}
210
211/* Return final hard regno (plus offset) which will be after
212   elimination.	 We do this for matching constraints because the final
213   hard regno could have a different class.  */
214static int
215get_final_hard_regno (int hard_regno, int offset)
216{
217  if (hard_regno < 0)
218    return hard_regno;
219  hard_regno = lra_get_elimination_hard_regno (hard_regno);
220  return hard_regno + offset;
221}
222
223/* Return hard regno of X after removing subreg and making
224   elimination.  If X is not a register or subreg of register, return
225   -1.  For pseudo use its assignment.  */
226static int
227get_hard_regno (rtx x)
228{
229  rtx reg;
230  int offset, hard_regno;
231
232  reg = x;
233  if (GET_CODE (x) == SUBREG)
234    reg = SUBREG_REG (x);
235  if (! REG_P (reg))
236    return -1;
237  if ((hard_regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
238    hard_regno = lra_get_regno_hard_regno (hard_regno);
239  if (hard_regno < 0)
240    return -1;
241  offset = 0;
242  if (GET_CODE (x) == SUBREG)
243    offset += subreg_regno_offset (hard_regno, GET_MODE (reg),
244				   SUBREG_BYTE (x),  GET_MODE (x));
245  return get_final_hard_regno (hard_regno, offset);
246}
247
248/* If REGNO is a hard register or has been allocated a hard register,
249   return the class of that register.  If REGNO is a reload pseudo
250   created by the current constraints pass, return its allocno class.
251   Return NO_REGS otherwise.  */
252static enum reg_class
253get_reg_class (int regno)
254{
255  int hard_regno;
256
257  if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
258    hard_regno = lra_get_regno_hard_regno (regno);
259  if (hard_regno >= 0)
260    {
261      hard_regno = get_final_hard_regno (hard_regno, 0);
262      return REGNO_REG_CLASS (hard_regno);
263    }
264  if (regno >= new_regno_start)
265    return lra_get_allocno_class (regno);
266  return NO_REGS;
267}
268
/* Return true if REG satisfies (or will satisfy) reg class constraint
   CL.  Use elimination first if REG is a hard register.  If REG is a
   reload pseudo created by this constraints pass, assume that it will
   be allocated a hard register from its allocno class, but allow that
   class to be narrowed to CL if it is currently a superset of CL.

   If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
   REGNO (reg), or NO_REGS if no change in its class was needed.  */
static bool
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class)
{
  enum reg_class rclass, common_class;
  machine_mode reg_mode;
  int class_size, hard_regno, nregs, i, j;
  int regno = REGNO (reg);

  if (new_class != NULL)
    *new_class = NO_REGS;
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      rtx final_reg = reg;
      rtx *final_loc = &final_reg;

      /* Hard register: apply any pending elimination before testing
	 class membership.  */
      lra_eliminate_reg_if_possible (final_loc);
      return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    }
  reg_mode = GET_MODE (reg);
  rclass = get_reg_class (regno);
  if (regno < new_regno_start
      /* Do not allow the constraints for reload instructions to
	 influence the classes of new pseudos.  These reloads are
	 typically moves that have many alternatives, and restricting
	 reload pseudos for one alternative may lead to situations
	 where other reload pseudos are no longer allocatable.  */
      || (INSN_UID (curr_insn) >= new_insn_uid_start
	  && curr_insn_set != NULL
	  && ((OBJECT_P (SET_SRC (curr_insn_set))
	       && ! CONSTANT_P (SET_SRC (curr_insn_set)))
	      || (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
		  && OBJECT_P (SUBREG_REG (SET_SRC (curr_insn_set)))
		  && ! CONSTANT_P (SUBREG_REG (SET_SRC (curr_insn_set)))))))
    /* When we don't know what class will be used finally for reload
       pseudos, we use ALL_REGS.  */
    return ((regno >= new_regno_start && rclass == ALL_REGS)
	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
					    lra_no_alloc_regs)));
  else
    {
      /* Narrow the pseudo's class to the intersection of RCLASS and
	 CL, provided that class still has enough allocatable
	 registers for the pseudo's mode.  */
      common_class = ira_reg_class_subset[rclass][cl];
      if (new_class != NULL)
	*new_class = common_class;
      if (hard_reg_set_subset_p (reg_class_contents[common_class],
				 lra_no_alloc_regs))
	return false;
      /* Check that there are enough allocatable regs.  */
      class_size = ira_class_hard_regs_num[common_class];
      for (i = 0; i < class_size; i++)
	{
	  hard_regno = ira_class_hard_regs[common_class][i];
	  nregs = hard_regno_nregs[hard_regno][reg_mode];
	  if (nregs == 1)
	    return true;
	  /* A multi-register value fits only if every component hard
	     register is allocatable and in the class.  */
	  for (j = 0; j < nregs; j++)
	    if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
		|| ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
					hard_regno + j))
	      break;
	  if (j >= nregs)
	    return true;
	}
      return false;
    }
}
343
344/* Return true if REGNO satisfies a memory constraint.	*/
345static bool
346in_mem_p (int regno)
347{
348  return get_reg_class (regno) == NO_REGS;
349}
350
/* Return 1 if ADDR is a valid memory address for mode MODE in address
   space AS, and check that each pseudo has the proper kind of hard
   reg.	 */
static int
valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
		 rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Old-style target: the macro jumps to WIN on a legitimate address
     and falls through otherwise.  It only supports the generic
     address space.  */
  lra_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* New-style target: delegate to the target hook (non-strict
     checking, reg_ok_strict == 0).  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
369
namespace {
  /* Temporarily eliminates registers in an address (for the lifetime of
     the object).  The constructor rewrites the base and index terms of
     the address in place; the destructor restores the original
     registers (RAII).  */
  class address_eliminator {
  public:
    address_eliminator (struct address_info *ad);
    ~address_eliminator ();

  private:
    struct address_info *m_ad;	/* The address being rewritten.  */
    rtx *m_base_loc;		/* Location of the base reg (subreg stripped).  */
    rtx m_base_reg;		/* Original base reg, for restoration.  */
    rtx *m_index_loc;		/* Location of the index reg (subreg stripped).  */
    rtx m_index_reg;		/* Original index reg, for restoration.  */
  };
}
386
/* Replace the base and index registers of AD with their eliminated
   forms, remembering the originals so the destructor can undo the
   change.  */
address_eliminator::address_eliminator (struct address_info *ad)
  : m_ad (ad),
    m_base_loc (strip_subreg (ad->base_term)),
    m_base_reg (NULL_RTX),
    m_index_loc (strip_subreg (ad->index_term)),
    m_index_reg (NULL_RTX)
{
  if (m_base_loc != NULL)
    {
      m_base_reg = *m_base_loc;
      lra_eliminate_reg_if_possible (m_base_loc);
      /* Keep a duplicated base term (e.g. in PRE_MODIFY) in sync.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc != NULL)
    {
      m_index_reg = *m_index_loc;
      lra_eliminate_reg_if_possible (m_index_loc);
    }
}
407
/* Restore the original base and index registers saved by the
   constructor, if elimination changed them.  */
address_eliminator::~address_eliminator ()
{
  if (m_base_loc && *m_base_loc != m_base_reg)
    {
      *m_base_loc = m_base_reg;
      /* Keep a duplicated base term in sync with the restored one.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc && *m_index_loc != m_index_reg)
    *m_index_loc = m_index_reg;
}
419
420/* Return true if the eliminated form of AD is a legitimate target address.  */
421static bool
422valid_address_p (struct address_info *ad)
423{
424  address_eliminator eliminator (ad);
425  return valid_address_p (ad->mode, *ad->outer, ad->as);
426}
427
428/* Return true if the eliminated form of memory reference OP satisfies
429   extra memory constraint CONSTRAINT.  */
430static bool
431satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
432{
433  struct address_info ad;
434
435  decompose_mem_address (&ad, op);
436  address_eliminator eliminator (&ad);
437  return constraint_satisfied_p (op, constraint);
438}
439
440/* Return true if the eliminated form of address AD satisfies extra
441   address constraint CONSTRAINT.  */
442static bool
443satisfies_address_constraint_p (struct address_info *ad,
444				enum constraint_num constraint)
445{
446  address_eliminator eliminator (ad);
447  return constraint_satisfied_p (*ad->outer, constraint);
448}
449
450/* Return true if the eliminated form of address OP satisfies extra
451   address constraint CONSTRAINT.  */
452static bool
453satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
454{
455  struct address_info ad;
456
457  decompose_lea_address (&ad, &op);
458  return satisfies_address_constraint_p (&ad, constraint);
459}
460
461/* Initiate equivalences for LRA.  As we keep original equivalences
462   before any elimination, we need to make copies otherwise any change
463   in insns might change the equivalences.  */
464void
465lra_init_equiv (void)
466{
467  ira_expand_reg_equiv ();
468  for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
469    {
470      rtx res;
471
472      if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
473	ira_reg_equiv[i].memory = copy_rtx (res);
474      if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
475	ira_reg_equiv[i].invariant = copy_rtx (res);
476    }
477}
478
479static rtx loc_equivalence_callback (rtx, const_rtx, void *);
480
481/* Update equivalence for REGNO.  We need to this as the equivalence
482   might contain other pseudos which are changed by their
483   equivalences.  */
484static void
485update_equiv (int regno)
486{
487  rtx x;
488
489  if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
490    ira_reg_equiv[regno].memory
491      = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
492				 NULL_RTX);
493  if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
494    ira_reg_equiv[regno].invariant
495      = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
496				 NULL_RTX);
497}
498
499/* If we have decided to substitute X with another value, return that
500   value, otherwise return X.  */
501static rtx
502get_equiv (rtx x)
503{
504  int regno;
505  rtx res;
506
507  if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
508      || ! ira_reg_equiv[regno].defined_p
509      || ! ira_reg_equiv[regno].profitable_p
510      || lra_get_regno_hard_regno (regno) >= 0)
511    return x;
512  if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
513    {
514      if (targetm.cannot_substitute_mem_equiv_p (res))
515	return x;
516      return res;
517    }
518  if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
519    return res;
520  if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
521    return res;
522  gcc_unreachable ();
523}
524
525/* If we have decided to substitute X with the equivalent value,
526   return that value after elimination for INSN, otherwise return
527   X.  */
528static rtx
529get_equiv_with_elimination (rtx x, rtx_insn *insn)
530{
531  rtx res = get_equiv (x);
532
533  if (x == res || CONSTANT_P (res))
534    return res;
535  return lra_eliminate_regs_1 (insn, res, GET_MODE (res),
536			       false, false, 0, true);
537}
538
539/* Set up curr_operand_mode.  */
540static void
541init_curr_operand_mode (void)
542{
543  int nop = curr_static_id->n_operands;
544  for (int i = 0; i < nop; i++)
545    {
546      machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
547      if (mode == VOIDmode)
548	{
549	  /* The .md mode for address operands is the mode of the
550	     addressed value rather than the mode of the address itself.  */
551	  if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
552	    mode = Pmode;
553	  else
554	    mode = curr_static_id->operand[i].mode;
555	}
556      curr_operand_mode[i] = mode;
557    }
558}
559
560
561
562/* The page contains code to reuse input reloads.  */
563
/* Structure describing an input reload of the current insn, used to
   detect duplicate reloads so that their reload pseudo can be
   shared.  */
struct input_reload
{
  /* Reloaded value.  */
  rtx input;
  /* Reload pseudo used.  */
  rtx reg;
};

/* The number of elements in the following array.  */
static int curr_insn_input_reloads_num;
/* Array containing info about input reloads.  It is used to find the
   same input reload and reuse the reload pseudo in this case.	*/
static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
578
/* Initiate data concerning reuse of input reloads for the current
   insn.  Called once per insn before reloads are generated.  */
static void
init_curr_insn_input_reloads (void)
{
  curr_insn_input_reloads_num = 0;
}
586
/* Create a new pseudo using MODE, RCLASS, ORIGINAL or reuse an already
   created input reload pseudo (only if TYPE is not OP_OUT).  Don't
   reuse a pseudo if IN_SUBREG_P is true and the reused pseudo should be
   wrapped up in a SUBREG.  The result pseudo is returned through
   RESULT_REG.  Return TRUE if we created a new pseudo, FALSE if we
   reused the already created input reload pseudo.  Use TITLE to
   describe new registers for debug purposes.  */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
		enum reg_class rclass, bool in_subreg_p,
		const char *title, rtx *result_reg)
{
  int i, regno;
  enum reg_class new_class;

  /* Output reloads are never shared: each one must get a fresh
     pseudo with a unique value.  */
  if (type == OP_OUT)
    {
      *result_reg
	= lra_create_new_reg_with_unique_value (mode, original, rclass, title);
      return true;
    }
  /* Prevent reuse value of expression with side effects,
     e.g. volatile memory.  */
  if (! side_effects_p (original))
    for (i = 0; i < curr_insn_input_reloads_num; i++)
      if (rtx_equal_p (curr_insn_input_reloads[i].input, original)
	  && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
	{
	  rtx reg = curr_insn_input_reloads[i].reg;
	  regno = REGNO (reg);
	  /* If input is equal to original and both are VOIDmode,
	     GET_MODE (reg) might be still different from mode.
	     Ensure we don't return *result_reg with wrong mode.  */
	  if (GET_MODE (reg) != mode)
	    {
	      if (in_subreg_p)
		continue;
	      if (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (mode))
		continue;
	      reg = lowpart_subreg (mode, reg, GET_MODE (reg));
	      if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
		continue;
	    }
	  *result_reg = reg;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	 Reuse r%d for reload ", regno);
	      dump_value_slim (lra_dump_file, original, 1);
	    }
	  /* Narrow the reused pseudo's class if this use requires a
	     subset of its current allocno class.  */
	  if (new_class != lra_get_allocno_class (regno))
	    lra_change_class (regno, new_class, ", change to", false);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "\n");
	  return false;
	}
  /* No reusable reload found: create a new pseudo and record it for
     possible reuse by later input reloads of this insn.  */
  *result_reg = lra_create_new_reg (mode, original, rclass, title);
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
  return true;
}
648
649
650
651/* The page contains code to extract memory address parts.  */
652
653/* Wrapper around REGNO_OK_FOR_INDEX_P, to allow pseudos.  */
654static inline bool
655ok_for_index_p_nonstrict (rtx reg)
656{
657  unsigned regno = REGNO (reg);
658
659  return regno >= FIRST_PSEUDO_REGISTER || REGNO_OK_FOR_INDEX_P (regno);
660}
661
662/* A version of regno_ok_for_base_p for use here, when all pseudos
663   should count as OK.	Arguments as for regno_ok_for_base_p.  */
664static inline bool
665ok_for_base_p_nonstrict (rtx reg, machine_mode mode, addr_space_t as,
666			 enum rtx_code outer_code, enum rtx_code index_code)
667{
668  unsigned regno = REGNO (reg);
669
670  if (regno >= FIRST_PSEUDO_REGISTER)
671    return true;
672  return ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
673}
674
675
676
677/* The page contains major code to choose the current insn alternative
678   and generate reloads for it.	 */
679
680/* Return the offset from REGNO of the least significant register
681   in (reg:MODE REGNO).
682
683   This function is used to tell whether two registers satisfy
684   a matching constraint.  (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
685
686         REGNO1 + lra_constraint_offset (REGNO1, MODE1)
687	 == REGNO2 + lra_constraint_offset (REGNO2, MODE2)  */
688int
689lra_constraint_offset (int regno, machine_mode mode)
690{
691  lra_assert (regno < FIRST_PSEUDO_REGISTER);
692  if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (mode) > UNITS_PER_WORD
693      && SCALAR_INT_MODE_P (mode))
694    return hard_regno_nregs[regno][mode] - 1;
695  return 0;
696}
697
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  /* Registers (possibly under a SUBREG) match if they resolve to the
     same hard register, taking endian word offsets into account.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      i = get_hard_regno (x);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:
  /* Fall back to structural comparison when no hard regno is known.
     A pseudo matches a SUBREG of that same pseudo.  */

  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared, so pointer inequality (checked
	 at the top) means they differ.  */
      return false;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'e':
	  /* Recurse on sub-expressions; no hard regno known there.  */
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.	 */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
830
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  HIGH
   parts cannot go into the pool, and the target may veto others.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class: either it has exactly
   one register, or the target reports it as likely to be spilled.  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
845
846/* If REG is a reload pseudo, try to make its class satisfying CL.  */
847static void
848narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
849{
850  enum reg_class rclass;
851
852  /* Do not make more accurate class from reloads generated.  They are
853     mostly moves with a lot of constraints.  Making more accurate
854     class may results in very narrow class and impossibility of find
855     registers for several reloads of one insn.	 */
856  if (INSN_UID (curr_insn) >= new_insn_uid_start)
857    return;
858  if (GET_CODE (reg) == SUBREG)
859    reg = SUBREG_REG (reg);
860  if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
861    return;
862  if (in_class_p (reg, cl, &rclass) && rclass != cl)
863    lra_change_class (REGNO (reg), rclass, "      Change to", true);
864}
865
866/* Generate reloads for matching OUT and INS (array of input operand
867   numbers with end marker -1) with reg class GOAL_CLASS.  Add input
868   and output reloads correspondingly to the lists *BEFORE and *AFTER.
869   OUT might be negative.  In this case we generate input reloads for
870   matched input operands INS.  */
871static void
872match_reload (signed char out, signed char *ins, enum reg_class goal_class,
873	      rtx_insn **before, rtx_insn **after)
874{
875  int i, in;
876  rtx new_in_reg, new_out_reg, reg, clobber;
877  machine_mode inmode, outmode;
878  rtx in_rtx = *curr_id->operand_loc[ins[0]];
879  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];
880
881  inmode = curr_operand_mode[ins[0]];
882  outmode = out < 0 ? inmode : curr_operand_mode[out];
883  push_to_sequence (*before);
884  if (inmode != outmode)
885    {
886      if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
887	{
888	  reg = new_in_reg
889	    = lra_create_new_reg_with_unique_value (inmode, in_rtx,
890						    goal_class, "");
891	  if (SCALAR_INT_MODE_P (inmode))
892	    new_out_reg = gen_lowpart_SUBREG (outmode, reg);
893	  else
894	    new_out_reg = gen_rtx_SUBREG (outmode, reg, 0);
895	  LRA_SUBREG_P (new_out_reg) = 1;
896	  /* If the input reg is dying here, we can use the same hard
897	     register for REG and IN_RTX.  We do it only for original
898	     pseudos as reload pseudos can die although original
899	     pseudos still live where reload pseudos dies.  */
900	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
901	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx)))
902	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
903	}
904      else
905	{
906	  reg = new_out_reg
907	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
908						    goal_class, "");
909	  if (SCALAR_INT_MODE_P (outmode))
910	    new_in_reg = gen_lowpart_SUBREG (inmode, reg);
911	  else
912	    new_in_reg = gen_rtx_SUBREG (inmode, reg, 0);
913	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
914	     NEW_OUT_REG living above.  We add clobber clause for
915	     this.  This is just a temporary clobber.  We can remove
916	     it at the end of LRA work.  */
917	  clobber = emit_clobber (new_out_reg);
918	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
919	  LRA_SUBREG_P (new_in_reg) = 1;
920	  if (GET_CODE (in_rtx) == SUBREG)
921	    {
922	      rtx subreg_reg = SUBREG_REG (in_rtx);
923
924	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
925		 and NEW_IN_REG are similar, we can use the same hard
926		 register for REG and SUBREG_REG.  */
927	      if (REG_P (subreg_reg)
928		  && (int) REGNO (subreg_reg) < lra_new_regno_start
929		  && GET_MODE (subreg_reg) == outmode
930		  && SUBREG_BYTE (in_rtx) == SUBREG_BYTE (new_in_reg)
931		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg)))
932		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
933	    }
934	}
935    }
936  else
937    {
938      /* Pseudos have values -- see comments for lra_reg_info.
939	 Different pseudos with the same value do not conflict even if
940	 they live in the same place.  When we create a pseudo we
941	 assign value of original pseudo (if any) from which we
942	 created the new pseudo.  If we create the pseudo from the
943	 input pseudo, the new pseudo will no conflict with the input
944	 pseudo which is wrong when the input pseudo lives after the
945	 insn and as the new pseudo value is changed by the insn
946	 output.  Therefore we create the new pseudo from the output.
947
948	 We cannot reuse the current output register because we might
949	 have a situation like "a <- a op b", where the constraints
950	 force the second input operand ("b") to match the output
951	 operand ("a").  "b" must then be copied into a new register
952	 so that it doesn't clobber the current value of "a".  */
953
954      new_in_reg = new_out_reg
955	= lra_create_new_reg_with_unique_value (outmode, out_rtx,
956						goal_class, "");
957    }
958  /* In operand can be got from transformations before processing insn
959     constraints.  One example of such transformations is subreg
960     reloading (see function simplify_operand_subreg).  The new
961     pseudos created by the transformations might have inaccurate
962     class (ALL_REGS) and we should make their classes more
963     accurate.  */
964  narrow_reload_pseudo_class (in_rtx, goal_class);
965  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
966  *before = get_insns ();
967  end_sequence ();
968  for (i = 0; (in = ins[i]) >= 0; i++)
969    {
970      lra_assert
971	(GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
972	 || GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]));
973      *curr_id->operand_loc[in] = new_in_reg;
974    }
975  lra_update_dups (curr_id, ins);
976  if (out < 0)
977    return;
978  /* See a comment for the input operand above.  */
979  narrow_reload_pseudo_class (out_rtx, goal_class);
980  if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
981    {
982      start_sequence ();
983      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
984      emit_insn (*after);
985      *after = get_insns ();
986      end_sequence ();
987    }
988  *curr_id->operand_loc[out] = new_out_reg;
989  lra_update_dup (curr_id, out);
990}
991
992/* Return register class which is union of all reg classes in insn
993   constraint alternative string starting with P.  */
994static enum reg_class
995reg_class_from_constraints (const char *p)
996{
997  int c, len;
998  enum reg_class op_class = NO_REGS;
999
1000  do
1001    switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1002      {
1003      case '#':
1004      case ',':
1005	return op_class;
1006
1007      case 'g':
1008	op_class = reg_class_subunion[op_class][GENERAL_REGS];
1009	break;
1010
1011      default:
1012	enum constraint_num cn = lookup_constraint (p);
1013	enum reg_class cl = reg_class_for_constraint (cn);
1014	if (cl == NO_REGS)
1015	  {
1016	    if (insn_extra_address_constraint (cn))
1017	      op_class
1018		= (reg_class_subunion
1019		   [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1020					      ADDRESS, SCRATCH)]);
1021	    break;
1022	  }
1023
1024	op_class = reg_class_subunion[op_class][cl];
1025 	break;
1026      }
1027  while ((p += len), c);
1028  return op_class;
1029}
1030
1031/* If OP is a register, return the class of the register as per
1032   get_reg_class, otherwise return NO_REGS.  */
1033static inline enum reg_class
1034get_op_class (rtx op)
1035{
1036  return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1037}
1038
1039/* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1040   otherwise.  If modes of MEM_PSEUDO and VAL are different, use
1041   SUBREG for VAL to make them equal.  */
1042static rtx_insn *
1043emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1044{
1045  if (GET_MODE (mem_pseudo) != GET_MODE (val))
1046    {
1047      /* Usually size of mem_pseudo is greater than val size but in
1048	 rare cases it can be less as it can be defined by target
1049	 dependent macro HARD_REGNO_CALLER_SAVE_MODE.  */
1050      if (! MEM_P (val))
1051	{
1052	  val = gen_rtx_SUBREG (GET_MODE (mem_pseudo),
1053				GET_CODE (val) == SUBREG ? SUBREG_REG (val) : val,
1054				0);
1055	  LRA_SUBREG_P (val) = 1;
1056	}
1057      else
1058	{
1059	  mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1060	  LRA_SUBREG_P (mem_pseudo) = 1;
1061	}
1062    }
1063  return as_a <rtx_insn *> (to_p
1064			    ? gen_move_insn (mem_pseudo, val)
1065			    : gen_move_insn (val, mem_pseudo));
1066}
1067
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn (curr_insn_set).  Set up that RTL was changed through
   CHANGE_P and that macro SECONDARY_MEMORY_NEEDED says to use
   secondary memory through SEC_MEM_P.

   This is a fast path for plain reg-to-reg moves: it asks the
   target's secondary_reload hook whether the move needs an
   intermediate register (or a special pattern with a scratch), and
   if so emits that reload before the insn.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  /* Look through subregs to the underlying register or memory.  */
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not defined for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
#ifdef SECONDARY_MEMORY_NEEDED
  if (SECONDARY_MEMORY_NEEDED (sclass, dclass, GET_MODE (src))
#ifdef SECONDARY_MEMORY_NEEDED_MODE
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || GET_MODE (src) != SECONDARY_MEMORY_NEEDED_MODE (GET_MODE (src)))
#endif
      )
    {
      /* The move must go through memory; the caller handles that.  */
      *sec_mem_p = true;
      return false;
    }
#endif
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  The temporary assignment is undone
     below.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* First ask about the input (load) direction...  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  /* ...then, if needed, about the output (store) direction.  */
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency: both directions must
	 agree (or one of them must require nothing).  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The hook supplied a special pattern; operand 2 of that
	 pattern is a scratch whose class comes from its
	 constraint.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = get_insns ();
  end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      /* The emitted pattern wrote directly into DEST, so the
	 original move is now redundant.  */
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1214
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.	*/
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The number of an operand to which given operand can be matched to.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.	 */
static int goal_alt_number;

/* The following variables are used to choose the best insn
   alternative.	 They reflect final characteristics of the best
   alternative.	 */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.	 */
static int best_reload_nregs;
/* Overall value reflecting distances of previous reloadings of the
   same value.  The distances are counted from the current BB start.
   It is used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no corresponding input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
1262
/* If CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   If CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  /* Work on the register inside a SUBREG, if any.  */
  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    loc = &SUBREG_REG (*loc);
  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* Try replacing the pseudo with its equivalence first.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  /* Copy so the insn gets its own RTL rather than sharing
	     the equivalence expression.  */
	  *loc = copy_rtx (*loc);
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  /* Equivalence substituted or the register's class does
	     not fit CL: a reload register is needed.  */
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, subreg_p, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* No reload needed; narrowing the pseudo's class is
	     enough.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, "	   Change to", true);
	  return false;
	}
      else
	return false;
    }
  if (before_p)
    {
      /* Load the original value into the reload register before the
	 insn.  */
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = get_insns ();
      end_sequence ();
    }
  *loc = new_reg;
  if (after != NULL)
    {
      /* Automodified address: store the (possibly modified) reload
	 register back after the insn.  */
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  return true;
}
1350
1351/* Insert move insn in simplify_operand_subreg. BEFORE returns
1352   the insn to be inserted before curr insn. AFTER returns the
1353   the insn to be inserted after curr insn.  ORIGREG and NEWREG
1354   are the original reg and new reg for reload.  */
1355static void
1356insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
1357			rtx newreg)
1358{
1359  if (before)
1360    {
1361      push_to_sequence (*before);
1362      lra_emit_move (newreg, origreg);
1363      *before = get_insns ();
1364      end_sequence ();
1365    }
1366  if (after)
1367    {
1368      start_sequence ();
1369      lra_emit_move (origreg, newreg);
1370      emit_insn (*after);
1371      *after = get_insns ();
1372      end_sequence ();
1373    }
1374}
1375
/* Forward declaration; defined later in this file.  */
static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
1377
/* Make reloads for subreg in operand NOP with internal subreg mode
   REG_MODE, add new reloads for further processing.  Return true if
   any change was done.

   Several cases are handled in order: a SUBREG of memory (alter it
   in place when the resulting address stays valid), a SUBREG of a
   hard register (simplify directly), a SUBREG of a constant
   (simplify or spill to the constant pool), a non-paradoxical SUBREG
   that cannot be expressed on the assigned hard register (reload the
   inner reg), and finally a paradoxical SUBREG whose hard register
   is too small for the outer mode (reload in the outer mode).  */
static bool
simplify_operand_subreg (int nop, machine_mode reg_mode)
{
  int hard_regno;
  rtx_insn *before, *after;
  machine_mode mode, innermode;
  rtx reg, new_reg;
  rtx operand = *curr_id->operand_loc[nop];
  enum reg_class regclass;
  enum op_type type;

  before = after = NULL;

  if (GET_CODE (operand) != SUBREG)
    return false;

  mode = GET_MODE (operand);
  reg = SUBREG_REG (operand);
  innermode = GET_MODE (reg);
  type = curr_static_id->operand[nop].type;
  /* If we change address for paradoxical subreg of memory, the
     address might violate the necessary alignment or the access might
     be slow.  So take this into consideration.  We should not worry
     about access beyond allocated memory for paradoxical memory
     subregs as we don't substitute such equiv memory (see processing
     equivalences in function lra_constraints) and because for spilled
     pseudos we allocate stack memory enough for the biggest
     corresponding paradoxical subreg.  */
  if (MEM_P (reg)
      && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (reg))
	  || MEM_ALIGN (reg) >= GET_MODE_ALIGNMENT (mode)))
    {
      rtx subst, old = *curr_id->operand_loc[nop];

      /* Fold the SUBREG into the MEM, then keep the result only if
	 the new address is valid (or the old one was not).  */
      alter_subreg (curr_id->operand_loc[nop], false);
      subst = *curr_id->operand_loc[nop];
      lra_assert (MEM_P (subst));
      if (! valid_address_p (innermode, XEXP (reg, 0),
			     MEM_ADDR_SPACE (reg))
	  || valid_address_p (GET_MODE (subst), XEXP (subst, 0),
			      MEM_ADDR_SPACE (subst)))
	return true;
      /* If the address was valid and became invalid, prefer to reload
	 the memory.  Typical case is when the index scale should
	 correspond the memory.  */
      *curr_id->operand_loc[nop] = old;
    }
  else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      /* SUBREG of a hard register: simplify it in place.  */
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  else if (CONSTANT_P (reg))
    {
      /* Try to simplify subreg of constant.  It is usually result of
	 equivalence substitution.  */
      if (innermode == VOIDmode
	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
	innermode = curr_static_id->operand[nop].mode;
      if ((new_reg = simplify_subreg (mode, reg, innermode,
				      SUBREG_BYTE (operand))) != NULL_RTX)
	{
	  *curr_id->operand_loc[nop] = new_reg;
	  return true;
	}
    }
  /* Put constant into memory when we have mixed modes.  It generates
     a better code in most cases as it does not need a secondary
     reload memory.  It also prevents LRA looping when LRA is using
     secondary reload memory again and again.  */
  if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
      && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
    {
      SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
     if there may be a problem accessing OPERAND in the outer
     mode.  */
  if ((REG_P (reg)
       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
       && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
       /* Don't reload paradoxical subregs because we could be looping
	  having repeatedly final regno out of hard regs range.  */
       && (hard_regno_nregs[hard_regno][innermode]
	   >= hard_regno_nregs[hard_regno][mode])
       && simplify_subreg_regno (hard_regno, innermode,
				 SUBREG_BYTE (operand), mode) < 0
       /* Don't reload subreg for matching reload.  It is actually
	  valid subreg in LRA.  */
       && ! LRA_SUBREG_P (operand))
      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
    {
      enum reg_class rclass;

      if (REG_P (reg))
	/* There is a big probability that we will get the same class
	   for the new pseudo and we will get the same insn which
	   means infinite looping.  So spill the new pseudo.  */
	rclass = NO_REGS;
      else
	/* The class will be defined later in curr_insn_transform.  */
	rclass
	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
			  rclass, TRUE, "subreg reg", &new_reg))
	{
	  bool insert_before, insert_after;
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT
			   || GET_MODE_SIZE (innermode) > GET_MODE_SIZE (mode));
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, new_reg);
	}
      /* NOTE(review): this relies on get_reload_reg setting NEW_REG
	 even when it returns false (reload pseudo reuse) — confirm
	 against get_reload_reg's definition.  */
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting subreg reload");
      return true;
    }
  /* Force a reload for a paradoxical subreg. For paradoxical subreg,
     IRA allocates hardreg to the inner pseudo reg according to its mode
     instead of the outermode, so the size of the hardreg may not be enough
     to contain the outermode operand, in that case we may need to insert
     reload for the reg. For the following two types of paradoxical subreg,
     we need to insert reload:
     1. If the op_type is OP_IN, and the hardreg could not be paired with
        other hardreg to contain the outermode operand
        (checked by in_hard_reg_set_p), we need to insert the reload.
     2. If the op_type is OP_OUT or OP_INOUT.

     Here is a paradoxical subreg example showing how the reload is generated:

     (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
        (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}

     In IRA, reg107 is allocated to a DImode hardreg. We use x86-64 as example
     here, if reg107 is assigned to hardreg R15, because R15 is the last
     hardreg, compiler cannot find another hardreg to pair with R15 to
     contain TImode data. So we insert a TImode reload reg180 for it.
     After reload is inserted:

     (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
        (reg:DI 107 [ __comp ])) -1
     (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
        (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}

     Two reload hard registers will be allocated to reg180 to save TImode data
     in LRA_assign.  */
  else if (REG_P (reg)
	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	   && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
	   && (hard_regno_nregs[hard_regno][innermode]
	       < hard_regno_nregs[hard_regno][mode])
	   && (regclass = lra_get_allocno_class (REGNO (reg)))
	   && (type != OP_IN
	       || !in_hard_reg_set_p (reg_class_contents[regclass],
				      mode, hard_regno)))
    {
      /* The class will be defined later in curr_insn_transform.  */
      enum reg_class rclass
	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
                          rclass, TRUE, "paradoxical subreg", &new_reg))
        {
	  rtx subreg;
	  bool insert_before, insert_after;

	  /* The reload pseudo is created in the outer mode; the insn
	     keeps referring to the inner mode through a subreg of
	     it.  */
	  PUT_MODE (new_reg, mode);
          subreg = simplify_gen_subreg (innermode, new_reg, mode, 0);
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT);
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, subreg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
                             "Inserting paradoxical subreg reload");
      return true;
    }
  return false;
}
1571
1572/* Return TRUE if X refers for a hard register from SET.  */
1573static bool
1574uses_hard_regs_p (rtx x, HARD_REG_SET set)
1575{
1576  int i, j, x_hard_regno;
1577  machine_mode mode;
1578  const char *fmt;
1579  enum rtx_code code;
1580
1581  if (x == NULL_RTX)
1582    return false;
1583  code = GET_CODE (x);
1584  mode = GET_MODE (x);
1585  if (code == SUBREG)
1586    {
1587      x = SUBREG_REG (x);
1588      code = GET_CODE (x);
1589      if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (mode))
1590	mode = GET_MODE (x);
1591    }
1592
1593  if (REG_P (x))
1594    {
1595      x_hard_regno = get_hard_regno (x);
1596      return (x_hard_regno >= 0
1597	      && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
1598    }
1599  if (MEM_P (x))
1600    {
1601      struct address_info ad;
1602
1603      decompose_mem_address (&ad, x);
1604      if (ad.base_term != NULL && uses_hard_regs_p (*ad.base_term, set))
1605	return true;
1606      if (ad.index_term != NULL && uses_hard_regs_p (*ad.index_term, set))
1607	return true;
1608    }
1609  fmt = GET_RTX_FORMAT (code);
1610  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1611    {
1612      if (fmt[i] == 'e')
1613	{
1614	  if (uses_hard_regs_p (XEXP (x, i), set))
1615	    return true;
1616	}
1617      else if (fmt[i] == 'E')
1618	{
1619	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1620	    if (uses_hard_regs_p (XVECEXP (x, i, j), set))
1621	      return true;
1622	}
1623    }
1624  return false;
1625}
1626
1627/* Return true if OP is a spilled pseudo. */
1628static inline bool
1629spilled_pseudo_p (rtx op)
1630{
1631  return (REG_P (op)
1632	  && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
1633}
1634
1635/* Return true if X is a general constant.  */
1636static inline bool
1637general_constant_p (rtx x)
1638{
1639  return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
1640}
1641
1642static bool
1643reg_in_class_p (rtx reg, enum reg_class cl)
1644{
1645  if (cl == NO_REGS)
1646    return get_reg_class (REGNO (reg)) == NO_REGS;
1647  return in_class_p (reg, cl, NULL);
1648}
1649
1650/* Return true if SET of RCLASS contains no hard regs which can be
1651   used in MODE.  */
1652static bool
1653prohibited_class_reg_set_mode_p (enum reg_class rclass,
1654				 HARD_REG_SET &set,
1655				 enum machine_mode mode)
1656{
1657  HARD_REG_SET temp;
1658
1659  // ??? Is this assert right
1660  // lra_assert (hard_reg_set_subset_p (set, reg_class_contents[rclass]));
1661  COPY_HARD_REG_SET (temp, set);
1662  AND_COMPL_HARD_REG_SET (temp, lra_no_alloc_regs);
1663  return (hard_reg_set_subset_p
1664	  (temp, ira_prohibited_class_mode_regs[rclass][mode]));
1665}
1666
1667/* Major function to choose the current insn alternative and what
1668   operands should be reloaded and how.	 If ONLY_ALTERNATIVE is not
1669   negative we should consider only this alternative.  Return false if
1670   we can not choose the alternative or find how to reload the
1671   operands.  */
1672static bool
1673process_alt_operands (int only_alternative)
1674{
1675  bool ok_p = false;
1676  int nop, overall, nalt;
1677  int n_alternatives = curr_static_id->n_alternatives;
1678  int n_operands = curr_static_id->n_operands;
1679  /* LOSERS counts the operands that don't fit this alternative and
1680     would require loading.  */
1681  int losers;
1682  /* REJECT is a count of how undesirable this alternative says it is
1683     if any reloading is required.  If the alternative matches exactly
1684     then REJECT is ignored, but otherwise it gets this much counted
1685     against it in addition to the reloading needed.  */
1686  int reject;
1687  int op_reject;
1688  /* The number of elements in the following array.  */
1689  int early_clobbered_regs_num;
1690  /* Numbers of operands which are early clobber registers.  */
1691  int early_clobbered_nops[MAX_RECOG_OPERANDS];
1692  enum reg_class curr_alt[MAX_RECOG_OPERANDS];
1693  HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
1694  bool curr_alt_match_win[MAX_RECOG_OPERANDS];
1695  bool curr_alt_win[MAX_RECOG_OPERANDS];
1696  bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
1697  int curr_alt_matches[MAX_RECOG_OPERANDS];
1698  /* The number of elements in the following array.  */
1699  int curr_alt_dont_inherit_ops_num;
1700  /* Numbers of operands whose reload pseudos should not be inherited.	*/
1701  int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1702  rtx op;
1703  /* The register when the operand is a subreg of register, otherwise the
1704     operand itself.  */
1705  rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
1706  /* The register if the operand is a register or subreg of register,
1707     otherwise NULL.  */
1708  rtx operand_reg[MAX_RECOG_OPERANDS];
1709  int hard_regno[MAX_RECOG_OPERANDS];
1710  machine_mode biggest_mode[MAX_RECOG_OPERANDS];
1711  int reload_nregs, reload_sum;
1712  bool costly_p;
1713  enum reg_class cl;
1714
1715  /* Calculate some data common for all alternatives to speed up the
1716     function.	*/
1717  for (nop = 0; nop < n_operands; nop++)
1718    {
1719      rtx reg;
1720
1721      op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
1722      /* The real hard regno of the operand after the allocation.  */
1723      hard_regno[nop] = get_hard_regno (op);
1724
1725      operand_reg[nop] = reg = op;
1726      biggest_mode[nop] = GET_MODE (op);
1727      if (GET_CODE (op) == SUBREG)
1728	{
1729	  operand_reg[nop] = reg = SUBREG_REG (op);
1730	  if (GET_MODE_SIZE (biggest_mode[nop])
1731	      < GET_MODE_SIZE (GET_MODE (reg)))
1732	    biggest_mode[nop] = GET_MODE (reg);
1733	}
1734      if (! REG_P (reg))
1735	operand_reg[nop] = NULL_RTX;
1736      else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
1737	       || ((int) REGNO (reg)
1738		   == lra_get_elimination_hard_regno (REGNO (reg))))
1739	no_subreg_reg_operand[nop] = reg;
1740      else
1741	operand_reg[nop] = no_subreg_reg_operand[nop]
1742	  /* Just use natural mode for elimination result.  It should
1743	     be enough for extra constraints hooks.  */
1744	  = regno_reg_rtx[hard_regno[nop]];
1745    }
1746
1747  /* The constraints are made of several alternatives.	Each operand's
1748     constraint looks like foo,bar,... with commas separating the
1749     alternatives.  The first alternatives for all operands go
1750     together, the second alternatives go together, etc.
1751
1752     First loop over alternatives.  */
1753  alternative_mask preferred = curr_id->preferred_alternatives;
1754  if (only_alternative >= 0)
1755    preferred &= ALTERNATIVE_BIT (only_alternative);
1756
1757  for (nalt = 0; nalt < n_alternatives; nalt++)
1758    {
1759      /* Loop over operands for one constraint alternative.  */
1760      if (!TEST_BIT (preferred, nalt))
1761	continue;
1762
1763      overall = losers = reject = reload_nregs = reload_sum = 0;
1764      for (nop = 0; nop < n_operands; nop++)
1765	{
1766	  int inc = (curr_static_id
1767		     ->operand_alternative[nalt * n_operands + nop].reject);
1768	  if (lra_dump_file != NULL && inc != 0)
1769	    fprintf (lra_dump_file,
1770		     "            Staticly defined alt reject+=%d\n", inc);
1771	  reject += inc;
1772	}
1773      early_clobbered_regs_num = 0;
1774
1775      for (nop = 0; nop < n_operands; nop++)
1776	{
1777	  const char *p;
1778	  char *end;
1779	  int len, c, m, i, opalt_num, this_alternative_matches;
1780	  bool win, did_match, offmemok, early_clobber_p;
1781	  /* false => this operand can be reloaded somehow for this
1782	     alternative.  */
1783	  bool badop;
1784	  /* true => this operand can be reloaded if the alternative
1785	     allows regs.  */
1786	  bool winreg;
1787	  /* True if a constant forced into memory would be OK for
1788	     this operand.  */
1789	  bool constmemok;
1790	  enum reg_class this_alternative, this_costly_alternative;
1791	  HARD_REG_SET this_alternative_set, this_costly_alternative_set;
1792	  bool this_alternative_match_win, this_alternative_win;
1793	  bool this_alternative_offmemok;
1794	  bool scratch_p;
1795	  machine_mode mode;
1796	  enum constraint_num cn;
1797
1798	  opalt_num = nalt * n_operands + nop;
1799	  if (curr_static_id->operand_alternative[opalt_num].anything_ok)
1800	    {
1801	      /* Fast track for no constraints at all.	*/
1802	      curr_alt[nop] = NO_REGS;
1803	      CLEAR_HARD_REG_SET (curr_alt_set[nop]);
1804	      curr_alt_win[nop] = true;
1805	      curr_alt_match_win[nop] = false;
1806	      curr_alt_offmemok[nop] = false;
1807	      curr_alt_matches[nop] = -1;
1808	      continue;
1809	    }
1810
1811	  op = no_subreg_reg_operand[nop];
1812	  mode = curr_operand_mode[nop];
1813
1814	  win = did_match = winreg = offmemok = constmemok = false;
1815	  badop = true;
1816
1817	  early_clobber_p = false;
1818	  p = curr_static_id->operand_alternative[opalt_num].constraint;
1819
1820	  this_costly_alternative = this_alternative = NO_REGS;
1821	  /* We update set of possible hard regs besides its class
1822	     because reg class might be inaccurate.  For example,
1823	     union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
1824	     is translated in HI_REGS because classes are merged by
1825	     pairs and there is no accurate intermediate class.	 */
1826	  CLEAR_HARD_REG_SET (this_alternative_set);
1827	  CLEAR_HARD_REG_SET (this_costly_alternative_set);
1828	  this_alternative_win = false;
1829	  this_alternative_match_win = false;
1830	  this_alternative_offmemok = false;
1831	  this_alternative_matches = -1;
1832
1833	  /* An empty constraint should be excluded by the fast
1834	     track.  */
1835	  lra_assert (*p != 0 && *p != ',');
1836
1837	  op_reject = 0;
1838	  /* Scan this alternative's specs for this operand; set WIN
1839	     if the operand fits any letter in this alternative.
1840	     Otherwise, clear BADOP if this operand could fit some
1841	     letter after reloads, or set WINREG if this operand could
1842	     fit after reloads provided the constraint allows some
1843	     registers.	 */
1844	  costly_p = false;
1845	  do
1846	    {
1847	      switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1848		{
1849		case '\0':
1850		  len = 0;
1851		  break;
1852		case ',':
1853		  c = '\0';
1854		  break;
1855
1856		case '&':
1857		  early_clobber_p = true;
1858		  break;
1859
1860		case '$':
1861		  op_reject += LRA_MAX_REJECT;
1862		  break;
1863		case '^':
1864		  op_reject += LRA_LOSER_COST_FACTOR;
1865		  break;
1866
1867		case '#':
1868		  /* Ignore rest of this alternative.  */
1869		  c = '\0';
1870		  break;
1871
1872		case '0':  case '1':  case '2':	 case '3':  case '4':
1873		case '5':  case '6':  case '7':	 case '8':  case '9':
1874		  {
1875		    int m_hregno;
1876		    bool match_p;
1877
1878		    m = strtoul (p, &end, 10);
1879		    p = end;
1880		    len = 0;
1881		    lra_assert (nop > m);
1882
1883		    this_alternative_matches = m;
1884		    m_hregno = get_hard_regno (*curr_id->operand_loc[m]);
1885		    /* We are supposed to match a previous operand.
1886		       If we do, we win if that one did.  If we do
1887		       not, count both of the operands as losers.
1888		       (This is too conservative, since most of the
1889		       time only a single reload insn will be needed
1890		       to make the two operands win.  As a result,
1891		       this alternative may be rejected when it is
1892		       actually desirable.)  */
1893		    match_p = false;
1894		    if (operands_match_p (*curr_id->operand_loc[nop],
1895					  *curr_id->operand_loc[m], m_hregno))
1896		      {
1897			/* We should reject matching of an early
1898			   clobber operand if the matching operand is
1899			   not dying in the insn.  */
1900			if (! curr_static_id->operand[m].early_clobber
1901			    || operand_reg[nop] == NULL_RTX
1902			    || (find_regno_note (curr_insn, REG_DEAD,
1903						 REGNO (op))
1904				|| REGNO (op) == REGNO (operand_reg[m])))
1905			  match_p = true;
1906		      }
1907		    if (match_p)
1908		      {
1909			/* If we are matching a non-offsettable
1910			   address where an offsettable address was
1911			   expected, then we must reject this
1912			   combination, because we can't reload
1913			   it.	*/
1914			if (curr_alt_offmemok[m]
1915			    && MEM_P (*curr_id->operand_loc[m])
1916			    && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
1917			  continue;
1918		      }
1919		    else
1920		      {
1921			/* Operands don't match.  Both operands must
1922			   allow a reload register, otherwise we
1923			   cannot make them match.  */
1924			if (curr_alt[m] == NO_REGS)
1925			  break;
1926			/* Retroactively mark the operand we had to
1927			   match as a loser, if it wasn't already and
1928			   it wasn't matched to a register constraint
1929			   (e.g it might be matched by memory). */
1930			if (curr_alt_win[m]
1931			    && (operand_reg[m] == NULL_RTX
1932				|| hard_regno[m] < 0))
1933			  {
1934			    losers++;
1935			    reload_nregs
1936			      += (ira_reg_class_max_nregs[curr_alt[m]]
1937				  [GET_MODE (*curr_id->operand_loc[m])]);
1938			  }
1939
1940			/* Prefer matching earlyclobber alternative as
1941			   it results in less hard regs required for
1942			   the insn than a non-matching earlyclobber
1943			   alternative.  */
1944			if (curr_static_id->operand[m].early_clobber)
1945			  {
1946			    if (lra_dump_file != NULL)
1947			      fprintf
1948				(lra_dump_file,
1949				 "            %d Matching earlyclobber alt:"
1950				 " reject--\n",
1951				 nop);
1952			    reject--;
1953			  }
1954			/* Otherwise we prefer no matching
1955			   alternatives because it gives more freedom
1956			   in RA.  */
1957			else if (operand_reg[nop] == NULL_RTX
1958				 || (find_regno_note (curr_insn, REG_DEAD,
1959						      REGNO (operand_reg[nop]))
1960				     == NULL_RTX))
1961			  {
1962			    if (lra_dump_file != NULL)
1963			      fprintf
1964				(lra_dump_file,
1965				 "            %d Matching alt: reject+=2\n",
1966				 nop);
1967			    reject += 2;
1968			  }
1969		      }
1970		    /* If we have to reload this operand and some
1971		       previous operand also had to match the same
1972		       thing as this operand, we don't know how to do
1973		       that.  */
1974		    if (!match_p || !curr_alt_win[m])
1975		      {
1976			for (i = 0; i < nop; i++)
1977			  if (curr_alt_matches[i] == m)
1978			    break;
1979			if (i < nop)
1980			  break;
1981		      }
1982		    else
1983		      did_match = true;
1984
1985		    /* This can be fixed with reloads if the operand
1986		       we are supposed to match can be fixed with
1987		       reloads. */
1988		    badop = false;
1989		    this_alternative = curr_alt[m];
1990		    COPY_HARD_REG_SET (this_alternative_set, curr_alt_set[m]);
1991		    winreg = this_alternative != NO_REGS;
1992		    break;
1993		  }
1994
1995		case 'g':
1996		  if (MEM_P (op)
1997		      || general_constant_p (op)
1998		      || spilled_pseudo_p (op))
1999		    win = true;
2000		  cl = GENERAL_REGS;
2001		  goto reg;
2002
2003		default:
2004		  cn = lookup_constraint (p);
2005		  switch (get_constraint_type (cn))
2006		    {
2007		    case CT_REGISTER:
2008		      cl = reg_class_for_constraint (cn);
2009		      if (cl != NO_REGS)
2010			goto reg;
2011		      break;
2012
2013		    case CT_CONST_INT:
2014		      if (CONST_INT_P (op)
2015			  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2016			win = true;
2017		      break;
2018
2019		    case CT_MEMORY:
2020		      if (MEM_P (op)
2021			  && satisfies_memory_constraint_p (op, cn))
2022			win = true;
2023		      else if (spilled_pseudo_p (op))
2024			win = true;
2025
2026		      /* If we didn't already win, we can reload constants
2027			 via force_const_mem or put the pseudo value into
2028			 memory, or make other memory by reloading the
2029			 address like for 'o'.  */
2030		      if (CONST_POOL_OK_P (mode, op)
2031			  || MEM_P (op) || REG_P (op))
2032			badop = false;
2033		      constmemok = true;
2034		      offmemok = true;
2035		      break;
2036
2037		    case CT_ADDRESS:
2038		      /* If we didn't already win, we can reload the address
2039			 into a base register.  */
2040		      if (satisfies_address_constraint_p (op, cn))
2041			win = true;
2042		      cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2043					   ADDRESS, SCRATCH);
2044		      badop = false;
2045		      goto reg;
2046
2047		    case CT_FIXED_FORM:
2048		      if (constraint_satisfied_p (op, cn))
2049			win = true;
2050		      break;
2051		    }
2052		  break;
2053
2054		reg:
2055		  this_alternative = reg_class_subunion[this_alternative][cl];
2056		  IOR_HARD_REG_SET (this_alternative_set,
2057				    reg_class_contents[cl]);
2058		  if (costly_p)
2059		    {
2060		      this_costly_alternative
2061			= reg_class_subunion[this_costly_alternative][cl];
2062		      IOR_HARD_REG_SET (this_costly_alternative_set,
2063					reg_class_contents[cl]);
2064		    }
2065		  if (mode == BLKmode)
2066		    break;
2067		  winreg = true;
2068		  if (REG_P (op))
2069		    {
2070		      if (hard_regno[nop] >= 0
2071			  && in_hard_reg_set_p (this_alternative_set,
2072						mode, hard_regno[nop]))
2073			win = true;
2074		      else if (hard_regno[nop] < 0
2075			       && in_class_p (op, this_alternative, NULL))
2076			win = true;
2077		    }
2078		  break;
2079		}
2080	      if (c != ' ' && c != '\t')
2081		costly_p = c == '*';
2082	    }
2083	  while ((p += len), c);
2084
2085	  scratch_p = (operand_reg[nop] != NULL_RTX
2086		       && lra_former_scratch_p (REGNO (operand_reg[nop])));
2087	  /* Record which operands fit this alternative.  */
2088	  if (win)
2089	    {
2090	      this_alternative_win = true;
2091	      if (operand_reg[nop] != NULL_RTX)
2092		{
2093		  if (hard_regno[nop] >= 0)
2094		    {
2095		      if (in_hard_reg_set_p (this_costly_alternative_set,
2096					     mode, hard_regno[nop]))
2097			{
2098			  if (lra_dump_file != NULL)
2099			    fprintf (lra_dump_file,
2100				     "            %d Costly set: reject++\n",
2101				     nop);
2102			  reject++;
2103			}
2104		    }
2105		  else
2106		    {
		      /* Prefer won reg to spilled pseudo under other
			 equal conditions for possible inheritance.  */
2109		      if (! scratch_p)
2110			{
2111			  if (lra_dump_file != NULL)
2112			    fprintf
2113			      (lra_dump_file,
2114			       "            %d Non pseudo reload: reject++\n",
2115			       nop);
2116			  reject++;
2117			}
2118		      if (in_class_p (operand_reg[nop],
2119				      this_costly_alternative, NULL))
2120			{
2121			  if (lra_dump_file != NULL)
2122			    fprintf
2123			      (lra_dump_file,
2124			       "            %d Non pseudo costly reload:"
2125			       " reject++\n",
2126			       nop);
2127			  reject++;
2128			}
2129		    }
2130		  /* We simulate the behaviour of old reload here.
2131		     Although scratches need hard registers and it
2132		     might result in spilling other pseudos, no reload
2133		     insns are generated for the scratches.  So it
2134		     might cost something but probably less than old
2135		     reload pass believes.  */
2136		  if (scratch_p)
2137		    {
2138		      if (lra_dump_file != NULL)
2139			fprintf (lra_dump_file,
2140				 "            %d Scratch win: reject+=2\n",
2141				 nop);
2142		      reject += 2;
2143		    }
2144		}
2145	    }
2146	  else if (did_match)
2147	    this_alternative_match_win = true;
2148	  else
2149	    {
2150	      int const_to_mem = 0;
2151	      bool no_regs_p;
2152
2153	      reject += op_reject;
2154	      /* Never do output reload of stack pointer.  It makes
2155		 impossible to do elimination when SP is changed in
2156		 RTL.  */
2157	      if (op == stack_pointer_rtx && ! frame_pointer_needed
2158		  && curr_static_id->operand[nop].type != OP_IN)
2159		goto fail;
2160
2161	      /* If this alternative asks for a specific reg class, see if there
2162		 is at least one allocatable register in that class.  */
2163	      no_regs_p
2164		= (this_alternative == NO_REGS
2165		   || (hard_reg_set_subset_p
2166		       (reg_class_contents[this_alternative],
2167			lra_no_alloc_regs)));
2168
2169	      /* For asms, verify that the class for this alternative is possible
2170		 for the mode that is specified.  */
2171	      if (!no_regs_p && INSN_CODE (curr_insn) < 0)
2172		{
2173		  int i;
2174		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2175		    if (HARD_REGNO_MODE_OK (i, mode)
2176			&& in_hard_reg_set_p (reg_class_contents[this_alternative],
2177					      mode, i))
2178		      break;
2179		  if (i == FIRST_PSEUDO_REGISTER)
2180		    winreg = false;
2181		}
2182
2183	      /* If this operand accepts a register, and if the
2184		 register class has at least one allocatable register,
2185		 then this operand can be reloaded.  */
2186	      if (winreg && !no_regs_p)
2187		badop = false;
2188
2189	      if (badop)
2190		{
2191		  if (lra_dump_file != NULL)
2192		    fprintf (lra_dump_file,
2193			     "            alt=%d: Bad operand -- refuse\n",
2194			     nalt);
2195		  goto fail;
2196		}
2197
	      /* If a not yet assigned pseudo has a class which is a
		 subset of the required reg class, it is a less costly
		 alternative as the pseudo can still get a hard reg of
		 the necessary class.  */
2202	      if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2203		  && (cl = get_reg_class (REGNO (op))) != NO_REGS
2204		  && ira_class_subset_p[this_alternative][cl])
2205		{
2206		  if (lra_dump_file != NULL)
2207		    fprintf
2208		      (lra_dump_file,
2209		       "            %d Super set class reg: reject-=3\n", nop);
2210		  reject -= 3;
2211		}
2212
2213	      this_alternative_offmemok = offmemok;
2214	      if (this_costly_alternative != NO_REGS)
2215		{
2216		  if (lra_dump_file != NULL)
2217		    fprintf (lra_dump_file,
2218			     "            %d Costly loser: reject++\n", nop);
2219		  reject++;
2220		}
	      /* If the operand is dying, has a matching constraint,
		 and satisfies constraints of the matched operand
		 which failed to satisfy its own constraints, most
		 probably the reload for this operand will be gone.  */
2225	      if (this_alternative_matches >= 0
2226		  && !curr_alt_win[this_alternative_matches]
2227		  && REG_P (op)
2228		  && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2229		  && (hard_regno[nop] >= 0
2230		      ? in_hard_reg_set_p (this_alternative_set,
2231					   mode, hard_regno[nop])
2232		      : in_class_p (op, this_alternative, NULL)))
2233		{
2234		  if (lra_dump_file != NULL)
2235		    fprintf
2236		      (lra_dump_file,
2237		       "            %d Dying matched operand reload: reject++\n",
2238		       nop);
2239		  reject++;
2240		}
2241	      else
2242		{
2243		  /* Strict_low_part requires to reload the register
2244		     not the sub-register.  In this case we should
2245		     check that a final reload hard reg can hold the
2246		     value mode.  */
2247		  if (curr_static_id->operand[nop].strict_low
2248		      && REG_P (op)
2249		      && hard_regno[nop] < 0
2250		      && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
2251		      && ira_class_hard_regs_num[this_alternative] > 0
2252		      && ! HARD_REGNO_MODE_OK (ira_class_hard_regs
2253					       [this_alternative][0],
2254					       GET_MODE
2255					       (*curr_id->operand_loc[nop])))
2256		    {
2257		      if (lra_dump_file != NULL)
2258			fprintf
2259			  (lra_dump_file,
2260			   "            alt=%d: Strict low subreg reload -- refuse\n",
2261			   nalt);
2262		      goto fail;
2263		    }
2264		  losers++;
2265		}
2266	      if (operand_reg[nop] != NULL_RTX
2267		  /* Output operands and matched input operands are
2268		     not inherited.  The following conditions do not
2269		     exactly describe the previous statement but they
2270		     are pretty close.  */
2271		  && curr_static_id->operand[nop].type != OP_OUT
2272		  && (this_alternative_matches < 0
2273		      || curr_static_id->operand[nop].type != OP_IN))
2274		{
2275		  int last_reload = (lra_reg_info[ORIGINAL_REGNO
2276						  (operand_reg[nop])]
2277				     .last_reload);
2278
		  /* The value of reload_sum makes sense only if we
		     process insns in their order.  It happens only on
		     the first constraints sub-pass when we do most of
		     the reload work.  */
2283		  if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
2284		    reload_sum += last_reload - bb_reload_num;
2285		}
2286	      /* If this is a constant that is reloaded into the
2287		 desired class by copying it to memory first, count
2288		 that as another reload.  This is consistent with
2289		 other code and is required to avoid choosing another
2290		 alternative when the constant is moved into memory.
2291		 Note that the test here is precisely the same as in
2292		 the code below that calls force_const_mem.  */
2293	      if (CONST_POOL_OK_P (mode, op)
2294		  && ((targetm.preferred_reload_class
2295		       (op, this_alternative) == NO_REGS)
2296		      || no_input_reloads_p))
2297		{
2298		  const_to_mem = 1;
2299		  if (! no_regs_p)
2300		    losers++;
2301		}
2302
2303	      /* Alternative loses if it requires a type of reload not
2304		 permitted for this insn.  We can always reload
2305		 objects with a REG_UNUSED note.  */
2306	      if ((curr_static_id->operand[nop].type != OP_IN
2307		   && no_output_reloads_p
2308		   && ! find_reg_note (curr_insn, REG_UNUSED, op))
2309		  || (curr_static_id->operand[nop].type != OP_OUT
2310		      && no_input_reloads_p && ! const_to_mem)
2311		  || (this_alternative_matches >= 0
2312		      && (no_input_reloads_p
2313			  || (no_output_reloads_p
2314			      && (curr_static_id->operand
2315				  [this_alternative_matches].type != OP_IN)
2316			      && ! find_reg_note (curr_insn, REG_UNUSED,
2317						  no_subreg_reg_operand
2318						  [this_alternative_matches])))))
2319		{
2320		  if (lra_dump_file != NULL)
2321		    fprintf
2322		      (lra_dump_file,
2323		       "            alt=%d: No input/otput reload -- refuse\n",
2324		       nalt);
2325		  goto fail;
2326		}
2327
	      /* Alternative loses if its required class pseudo cannot
		 hold a value of the required mode.  Such insns can be
		 described by insn definitions with mode iterators.  */
	      if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
		  && ! hard_reg_set_empty_p (this_alternative_set)
		  /* It is common practice for constraints to use a
		     class which does not actually have enough regs to
		     hold the value (e.g. x86 AREG for a mode requiring
		     more than one general reg).  Therefore we have 2
		     conditions to check that the reload pseudo cannot
		     hold the mode value.  */
2339		  && ! HARD_REGNO_MODE_OK (ira_class_hard_regs
2340					   [this_alternative][0],
2341					   GET_MODE (*curr_id->operand_loc[nop]))
2342		  /* The above condition is not enough as the first
2343		     reg in ira_class_hard_regs can be not aligned for
2344		     multi-words mode values.  */
2345		  && (prohibited_class_reg_set_mode_p
2346		      (this_alternative, this_alternative_set,
2347		       GET_MODE (*curr_id->operand_loc[nop]))))
2348		{
2349		  if (lra_dump_file != NULL)
2350		    fprintf (lra_dump_file,
2351			     "            alt=%d: reload pseudo for op %d "
2352			     " can not hold the mode value -- refuse\n",
2353			     nalt, nop);
2354		  goto fail;
2355		}
2356
2357	      /* Check strong discouragement of reload of non-constant
2358		 into class THIS_ALTERNATIVE.  */
2359	      if (! CONSTANT_P (op) && ! no_regs_p
2360		  && (targetm.preferred_reload_class
2361		      (op, this_alternative) == NO_REGS
2362		      || (curr_static_id->operand[nop].type == OP_OUT
2363			  && (targetm.preferred_output_reload_class
2364			      (op, this_alternative) == NO_REGS))))
2365		{
2366		  if (lra_dump_file != NULL)
2367		    fprintf (lra_dump_file,
2368			     "            %d Non-prefered reload: reject+=%d\n",
2369			     nop, LRA_MAX_REJECT);
2370		  reject += LRA_MAX_REJECT;
2371		}
2372
2373	      if (! (MEM_P (op) && offmemok)
2374		  && ! (const_to_mem && constmemok))
2375		{
2376		  /* We prefer to reload pseudos over reloading other
2377		     things, since such reloads may be able to be
2378		     eliminated later.  So bump REJECT in other cases.
2379		     Don't do this in the case where we are forcing a
2380		     constant into memory and it will then win since
2381		     we don't want to have a different alternative
2382		     match then.  */
2383		  if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2384		    {
2385		      if (lra_dump_file != NULL)
2386			fprintf
2387			  (lra_dump_file,
2388			   "            %d Non-pseudo reload: reject+=2\n",
2389			   nop);
2390		      reject += 2;
2391		    }
2392
2393		  if (! no_regs_p)
2394		    reload_nregs
2395		      += ira_reg_class_max_nregs[this_alternative][mode];
2396
2397		  if (SMALL_REGISTER_CLASS_P (this_alternative))
2398		    {
2399		      if (lra_dump_file != NULL)
2400			fprintf
2401			  (lra_dump_file,
2402			   "            %d Small class reload: reject+=%d\n",
2403			   nop, LRA_LOSER_COST_FACTOR / 2);
2404		      reject += LRA_LOSER_COST_FACTOR / 2;
2405		    }
2406		}
2407
	      /* We are trying to spill a pseudo into memory.  It is
		 usually more costly than moving to a hard register
		 although it might take the same number of
		 reloads.  */
2412	      if (no_regs_p && REG_P (op) && hard_regno[nop] >= 0)
2413		{
2414		  if (lra_dump_file != NULL)
2415		    fprintf
2416		      (lra_dump_file,
2417		       "            %d Spill pseudo into memory: reject+=3\n",
2418		       nop);
2419		  reject += 3;
2420		  if (VECTOR_MODE_P (mode))
2421		    {
2422		      /* Spilling vectors into memory is usually more
2423			 costly as they contain big values.  */
2424		      if (lra_dump_file != NULL)
2425			fprintf
2426			  (lra_dump_file,
2427			   "            %d Spill vector pseudo: reject+=2\n",
2428			   nop);
2429		      reject += 2;
2430		    }
2431		}
2432
2433#ifdef SECONDARY_MEMORY_NEEDED
2434	      /* If reload requires moving value through secondary
2435		 memory, it will need one more insn at least.  */
2436	      if (this_alternative != NO_REGS
2437		  && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
2438		  && ((curr_static_id->operand[nop].type != OP_OUT
2439		       && SECONDARY_MEMORY_NEEDED (cl, this_alternative,
2440						   GET_MODE (op)))
2441		      || (curr_static_id->operand[nop].type != OP_IN
2442			  && SECONDARY_MEMORY_NEEDED (this_alternative, cl,
2443						      GET_MODE (op)))))
2444		losers++;
2445#endif
2446	      /* Input reloads can be inherited more often than output
2447		 reloads can be removed, so penalize output
2448		 reloads.  */
2449	      if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
2450		{
2451		  if (lra_dump_file != NULL)
2452		    fprintf
2453		      (lra_dump_file,
2454		       "            %d Non input pseudo reload: reject++\n",
2455		       nop);
2456		  reject++;
2457		}
2458	    }
2459
2460	  if (early_clobber_p && ! scratch_p)
2461	    {
2462	      if (lra_dump_file != NULL)
2463		fprintf (lra_dump_file,
2464			 "            %d Early clobber: reject++\n", nop);
2465	      reject++;
2466	    }
	  /* ??? We check early clobbers after processing all operands
	     (see loop below) and there we update the costs more.
	     Should we update the cost (maybe approximately) here
	     because of early clobber register reloads, or is that a
	     rare or unimportant case not worth the effort?  */
2472	  overall = losers * LRA_LOSER_COST_FACTOR + reject;
2473	  if ((best_losers == 0 || losers != 0) && best_overall < overall)
2474            {
2475              if (lra_dump_file != NULL)
2476		fprintf (lra_dump_file,
2477			 "            alt=%d,overall=%d,losers=%d -- refuse\n",
2478			 nalt, overall, losers);
2479              goto fail;
2480            }
2481
2482	  curr_alt[nop] = this_alternative;
2483	  COPY_HARD_REG_SET (curr_alt_set[nop], this_alternative_set);
2484	  curr_alt_win[nop] = this_alternative_win;
2485	  curr_alt_match_win[nop] = this_alternative_match_win;
2486	  curr_alt_offmemok[nop] = this_alternative_offmemok;
2487	  curr_alt_matches[nop] = this_alternative_matches;
2488
2489	  if (this_alternative_matches >= 0
2490	      && !did_match && !this_alternative_win)
2491	    curr_alt_win[this_alternative_matches] = false;
2492
2493	  if (early_clobber_p && operand_reg[nop] != NULL_RTX)
2494	    early_clobbered_nops[early_clobbered_regs_num++] = nop;
2495	}
2496      if (curr_insn_set != NULL_RTX && n_operands == 2
2497	  /* Prevent processing non-move insns.  */
2498	  && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
2499	      || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
2500	  && ((! curr_alt_win[0] && ! curr_alt_win[1]
2501	       && REG_P (no_subreg_reg_operand[0])
2502	       && REG_P (no_subreg_reg_operand[1])
2503	       && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2504		   || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
2505	      || (! curr_alt_win[0] && curr_alt_win[1]
2506		  && REG_P (no_subreg_reg_operand[1])
2507		  && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
2508	      || (curr_alt_win[0] && ! curr_alt_win[1]
2509		  && REG_P (no_subreg_reg_operand[0])
2510		  && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2511		  && (! CONST_POOL_OK_P (curr_operand_mode[1],
2512					 no_subreg_reg_operand[1])
2513		      || (targetm.preferred_reload_class
2514			  (no_subreg_reg_operand[1],
2515			   (enum reg_class) curr_alt[1]) != NO_REGS))
2516		  /* If it is a result of recent elimination in move
2517		     insn we can transform it into an add still by
2518		     using this alternative.  */
2519		  && GET_CODE (no_subreg_reg_operand[1]) != PLUS)))
2520	{
2521	  /* We have a move insn and a new reload insn will be similar
2522	     to the current insn.  We should avoid such situation as it
2523	     results in LRA cycling.  */
2524	  overall += LRA_MAX_REJECT;
2525	}
2526      ok_p = true;
2527      curr_alt_dont_inherit_ops_num = 0;
2528      for (nop = 0; nop < early_clobbered_regs_num; nop++)
2529	{
2530	  int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
2531	  HARD_REG_SET temp_set;
2532
2533	  i = early_clobbered_nops[nop];
2534	  if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
2535	      || hard_regno[i] < 0)
2536	    continue;
2537	  lra_assert (operand_reg[i] != NULL_RTX);
2538	  clobbered_hard_regno = hard_regno[i];
2539	  CLEAR_HARD_REG_SET (temp_set);
2540	  add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
2541	  first_conflict_j = last_conflict_j = -1;
2542	  for (j = 0; j < n_operands; j++)
2543	    if (j == i
		/* We don't want to process the insides of
		   match_operator and match_parallel because otherwise
		   we would process their operands once again,
		   generating wrong code.  */
2548		|| curr_static_id->operand[j].is_operator)
2549	      continue;
2550	    else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
2551		     || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
2552	      continue;
2553	    /* If we don't reload j-th operand, check conflicts.  */
2554	    else if ((curr_alt_win[j] || curr_alt_match_win[j])
2555		     && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
2556	      {
2557		if (first_conflict_j < 0)
2558		  first_conflict_j = j;
2559		last_conflict_j = j;
2560	      }
2561	  if (last_conflict_j < 0)
2562	    continue;
	  /* If the earlyclobber operand conflicts with another
	     non-matching operand which is actually the same register
	     as the earlyclobber operand, it is better to reload the
	     other operand, as an operand matching the earlyclobber
	     operand can also be the same register.  */
2568	  if (first_conflict_j == last_conflict_j
2569	      && operand_reg[last_conflict_j]
2570	      != NULL_RTX && ! curr_alt_match_win[last_conflict_j]
2571	      && REGNO (operand_reg[i]) == REGNO (operand_reg[last_conflict_j]))
2572	    {
2573	      curr_alt_win[last_conflict_j] = false;
2574	      curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
2575		= last_conflict_j;
2576	      losers++;
2577	      /* Early clobber was already reflected in REJECT. */
2578	      lra_assert (reject > 0);
2579	      if (lra_dump_file != NULL)
2580		fprintf
2581		  (lra_dump_file,
2582		   "            %d Conflict early clobber reload: reject--\n",
2583		   i);
2584	      reject--;
2585	      overall += LRA_LOSER_COST_FACTOR - 1;
2586	    }
2587	  else
2588	    {
2589	      /* We need to reload early clobbered register and the
2590		 matched registers.  */
2591	      for (j = 0; j < n_operands; j++)
2592		if (curr_alt_matches[j] == i)
2593		  {
2594		    curr_alt_match_win[j] = false;
2595		    losers++;
2596		    overall += LRA_LOSER_COST_FACTOR;
2597		  }
2598	      if (! curr_alt_match_win[i])
2599		curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
2600	      else
2601		{
2602		  /* Remember pseudos used for match reloads are never
2603		     inherited.  */
2604		  lra_assert (curr_alt_matches[i] >= 0);
2605		  curr_alt_win[curr_alt_matches[i]] = false;
2606		}
2607	      curr_alt_win[i] = curr_alt_match_win[i] = false;
2608	      losers++;
2609	      /* Early clobber was already reflected in REJECT. */
2610	      lra_assert (reject > 0);
2611	      if (lra_dump_file != NULL)
2612		fprintf
2613		  (lra_dump_file,
2614		   "            %d Matched conflict early clobber reloads:"
2615		   "reject--\n",
2616		   i);
2617	      reject--;
2618	      overall += LRA_LOSER_COST_FACTOR - 1;
2619	    }
2620	}
2621      if (lra_dump_file != NULL)
2622	fprintf (lra_dump_file, "          alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
2623		 nalt, overall, losers, reload_nregs);
2624
2625      /* If this alternative can be made to work by reloading, and it
2626	 needs less reloading than the others checked so far, record
2627	 it as the chosen goal for reloading.  */
2628      if ((best_losers != 0 && losers == 0)
2629	  || (((best_losers == 0 && losers == 0)
2630	       || (best_losers != 0 && losers != 0))
2631	      && (best_overall > overall
2632		  || (best_overall == overall
2633		      /* If the cost of the reloads is the same,
2634			 prefer alternative which requires minimal
2635			 number of reload regs.  */
2636		      && (reload_nregs < best_reload_nregs
2637			  || (reload_nregs == best_reload_nregs
2638			      && (best_reload_sum < reload_sum
2639				  || (best_reload_sum == reload_sum
2640				      && nalt < goal_alt_number))))))))
2641	{
2642	  for (nop = 0; nop < n_operands; nop++)
2643	    {
2644	      goal_alt_win[nop] = curr_alt_win[nop];
2645	      goal_alt_match_win[nop] = curr_alt_match_win[nop];
2646	      goal_alt_matches[nop] = curr_alt_matches[nop];
2647	      goal_alt[nop] = curr_alt[nop];
2648	      goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
2649	    }
2650	  goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
2651	  for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
2652	    goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
2653	  goal_alt_swapped = curr_swapped;
2654	  best_overall = overall;
2655	  best_losers = losers;
2656	  best_reload_nregs = reload_nregs;
2657	  best_reload_sum = reload_sum;
2658	  goal_alt_number = nalt;
2659	}
2660      if (losers == 0)
2661	/* Everything is satisfied.  Do not process alternatives
2662	   anymore.  */
2663	break;
2664    fail:
2665      ;
2666    }
2667  return ok_p;
2668}
2669
/* Make reload base reg from address AD.  Reload only the base term of
   AD into a new pseudo of the natural base register class and return
   the resulting inner address (the new base plus the original
   displacement, if any).  Return NULL_RTX if the resulting address
   would not be valid or if the move of the old base into the new
   pseudo is not recognized by the target; in the latter case any
   insns emitted here are deleted again.  */
static rtx
base_to_reg (struct address_info *ad)
{
  enum reg_class cl;
  int code = -1;
  rtx new_inner = NULL_RTX;
  rtx new_reg = NULL_RTX;
  rtx_insn *insn;
  /* Remember the stream position so we can roll back on failure.  */
  rtx_insn *last_insn = get_last_insn();

  lra_assert (ad->base == ad->base_term && ad->disp == ad->disp_term);
  cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
                       get_index_code (ad));
  new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
                                cl, "base");
  /* Build new_reg + disp (or new_reg + 0 when there is no
     displacement term).  */
  new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
                                   ad->disp_term == NULL
                                   ? gen_int_mode (0, ad->mode)
                                   : *ad->disp_term);
  /* Give up before emitting anything if the new address shape is not
     valid.  */
  if (!valid_address_p (ad->mode, new_inner, ad->as))
    return NULL_RTX;
  /* Emit the reload of the old base into the new pseudo and make sure
     the target recognizes the move.  */
  insn = emit_insn (gen_rtx_SET (ad->mode, new_reg, *ad->base_term));
  code = recog_memoized (insn);
  if (code < 0)
    {
      delete_insns_since (last_insn);
      return NULL_RTX;
    }

  return new_inner;
}
2702
2703/* Make reload base reg + disp from address AD.  Return the new pseudo.  */
2704static rtx
2705base_plus_disp_to_reg (struct address_info *ad)
2706{
2707  enum reg_class cl;
2708  rtx new_reg;
2709
2710  lra_assert (ad->base == ad->base_term && ad->disp == ad->disp_term);
2711  cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
2712		       get_index_code (ad));
2713  new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
2714				cl, "base + disp");
2715  lra_emit_add (new_reg, *ad->base_term, *ad->disp_term);
2716  return new_reg;
2717}
2718
2719/* Make reload of index part of address AD.  Return the new
2720   pseudo.  */
2721static rtx
2722index_part_to_reg (struct address_info *ad)
2723{
2724  rtx new_reg;
2725
2726  new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
2727				INDEX_REG_CLASS, "index term");
2728  expand_mult (GET_MODE (*ad->index), *ad->index_term,
2729	       GEN_INT (get_index_scale (ad)), new_reg, 1);
2730  return new_reg;
2731}
2732
2733/* Return true if we can add a displacement to address AD, even if that
2734   makes the address invalid.  The fix-up code requires any new address
2735   to be the sum of the BASE_TERM, INDEX and DISP_TERM fields.  */
2736static bool
2737can_add_disp_p (struct address_info *ad)
2738{
2739  return (!ad->autoinc_p
2740	  && ad->segment == NULL
2741	  && ad->base == ad->base_term
2742	  && ad->disp == ad->disp_term);
2743}
2744
/* Make equiv substitution in address AD.  Replace the base and/or
   index registers by their equivalences (after elimination), folding
   any reg + const equivalence into the displacement.  Return true if
   a substitution was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  HOST_WIDE_INT disp, scale;
  bool change_p;

  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to do if neither register has an equivalence.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  /* DISP accumulates constant offsets picked up from reg + const
     equivalences; they are folded into the displacement at the
     end.  */
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      if (REG_P (new_base_reg))
	{
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && CONST_INT_P (XEXP (new_base_reg, 1))
	       && can_add_disp_p (ad))
	{
	  /* Equivalence is reg + const: use the reg as the new base
	     and remember the constant part.  */
	  disp += INTVAL (XEXP (new_base_reg, 1));
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep the second base term (if any) in sync with the
	 first.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && CONST_INT_P (XEXP (new_index_reg, 1))
	       && can_add_disp_p (ad)
	       && (scale = get_index_scale (ad)))
	{
	  /* For an index the constant part must be scaled before
	     being added to the displacement.  */
	  disp += INTVAL (XEXP (new_index_reg, 1)) * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (disp != 0)
    {
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  /* The address had no displacement before: add one and
	     redecompose the address.  */
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
2841
/* Major function to make reloads for an address in operand NOP or
   check its correctness (if CHECK_ONLY_P is true).  The supported
   cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations (when CHECK_ONLY_P is false) which can be
   necessary.  It does just basic steps.  To do all necessary
   transformations use function process_address.  */
static bool
process_address_1 (int nop, bool check_only_p,
		   rtx_insn **before, rtx_insn **after)
{
  struct address_info ad;
  rtx new_reg;
  rtx op = *curr_id->operand_loc[nop];
  const char *constraint = curr_static_id->operand[nop].constraint;
  enum constraint_num cn = lookup_constraint (constraint);
  bool change_p = false;

  /* Only addresses given by an address constraint, MEMs, and subregs
     of MEMs are decomposed and processed here.  */
  if (insn_extra_address_constraint (cn))
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  else if (MEM_P (op))
    decompose_mem_address (&ad, op);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    return false;
  /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
     index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
     when INDEX_REG_CLASS is a single register class.  */
  if (ad.base_term != NULL
      && ad.index_term != NULL
      && ira_class_hard_regs_num[INDEX_REG_CLASS] == 1
      && REG_P (*ad.base_term)
      && REG_P (*ad.index_term)
      && in_class_p (*ad.base_term, INDEX_REG_CLASS, NULL)
      && ! in_class_p (*ad.index_term, INDEX_REG_CLASS, NULL))
    {
      std::swap (ad.base, ad.index);
      std::swap (ad.base_term, ad.index_term);
    }
  if (! check_only_p)
    change_p = equiv_address_substitution (&ad);
  /* Reload the base register if necessary.  For an autoincrement of a
     register that does not die here the reload must also appear after
     the insn, hence the AFTER list.  */
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, check_only_p, before,
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad)))))
    {
      change_p = true;
      if (ad.base_term2 != NULL)
	*ad.base_term2 = *ad.base_term;
    }
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, check_only_p,
			   before, NULL, INDEX_REG_CLASS))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  if (check_only_p)
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (&ad))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
#ifdef HAVE_lo_sum
	  {
	    rtx_insn *insn;
	    rtx_insn *last = get_last_insn ();

	    /* addr => lo_sum (new_base, addr), case (2) above.  */
	    insn = emit_insn (gen_rtx_SET
			      (VOIDmode, new_reg,
			       gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	    code = recog_memoized (insn);
	    if (code >= 0)
	      {
		*ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		if (! valid_address_p (ad.mode, *ad.outer, ad.as))
		  {
		    /* Try to put lo_sum into register.  */
		    insn = emit_insn (gen_rtx_SET
				      (VOIDmode, new_reg,
				       gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		    code = recog_memoized (insn);
		    if (code >= 0)
		      {
			*ad.inner = new_reg;
			if (! valid_address_p (ad.mode, *ad.outer, ad.as))
			  {
			    /* Neither form worked: restore the
			       original address and fall back to a
			       plain move below.  */
			    *ad.inner = addr;
			    code = -1;
			  }
		      }

		  }
	      }
	    if (code < 0)
	      delete_insns_since (last);
	  }
#endif
	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index));

	  lra_assert (INDEX_REG_CLASS != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set;
      rtx_insn *insns, *last_insn;
      /* Try to reload base into register only if the base is invalid
         for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
        new_reg = base_plus_disp_to_reg (&ad);
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (last_insn != insns && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (ad.mode, *ad.outer, ad.as))
	    {
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
				   get_index_code (&ad));
	      regno = REGNO (*ad.base_term);
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, "      Change to", true);
	      new_reg = SET_SRC (set);
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      /* Try if target can split displacement into legitimate new disp
	 and offset.  If it's the case, we replace the last insn with
	 insns for base + offset => new_reg and set new_reg + new disp
	 to *ad.inner.  */
      last_insn = get_last_insn ();
      if ((set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && REGNO (XEXP (SET_SRC (set), 0)) < FIRST_PSEUDO_REGISTER
	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
	{
	  rtx addend, disp = XEXP (SET_SRC (set), 1);
	  if (targetm.legitimize_address_displacement (&disp, &addend,
						       ad.mode))
	    {
	      rtx_insn *new_insns;
	      start_sequence ();
	      lra_emit_add (new_reg, XEXP (SET_SRC (set), 0), addend);
	      new_insns = get_insns ();
	      end_sequence ();
	      new_reg = gen_rtx_PLUS (Pmode, new_reg, disp);
	      delete_insns_since (PREV_INSN (last_insn));
	      add_insn (new_insns);
	      insns = get_insns ();
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      new_reg = base_plus_disp_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if (get_index_scale (&ad) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
      Index part of address may become invalid.  For example, we
      changed pseudo on the equivalent memory and a subreg of the
      pseudo onto the memory of different mode for which the scale is
      prohibited.  */
      new_reg = index_part_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  *before = get_insns ();
  end_sequence ();
  return true;
}
3125
3126/* If CHECK_ONLY_P is false, do address reloads until it is necessary.
3127   Use process_address_1 as a helper function.  Return true for any
3128   RTL changes.
3129
3130   If CHECK_ONLY_P is true, just check address correctness.  Return
3131   false if the address correct.  */
3132static bool
3133process_address (int nop, bool check_only_p,
3134		 rtx_insn **before, rtx_insn **after)
3135{
3136  bool res = false;
3137
3138  while (process_address_1 (nop, check_only_p, before, after))
3139    {
3140      if (check_only_p)
3141	return true;
3142      res = true;
3143    }
3144  return res;
3145}
3146
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   Return pseudo containing the result.	 */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;
  rtx result;
  /* True if we add INC, false if we subtract it.  */
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment expression is embedded in
	 VALUE itself: reg = reg +/- inc.  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE,POST}_{DEC,INC,MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* The direct add/sub was not recognized: roll it back and fall
	 through to incrementing in RESULT.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  /* Undo the addition by adding -INC when INC is a constant;
	     otherwise subtract it back.  */
	  if (CONST_INT_P (inc))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-INTVAL (inc),
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
3274
3275/* Return true if the current move insn does not need processing as we
3276   already know that it satisfies its constraints.  */
3277static bool
3278simple_move_p (void)
3279{
3280  rtx dest, src;
3281  enum reg_class dclass, sclass;
3282
3283  lra_assert (curr_insn_set != NULL_RTX);
3284  dest = SET_DEST (curr_insn_set);
3285  src = SET_SRC (curr_insn_set);
3286  return ((dclass = get_op_class (dest)) != NO_REGS
3287	  && (sclass = get_op_class (src)) != NO_REGS
3288	  /* The backend guarantees that register moves of cost 2
3289	     never need reloads.  */
3290	  && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
3291 }
3292
3293/* Swap operands NOP and NOP + 1. */
3294static inline void
3295swap_operands (int nop)
3296{
3297  machine_mode mode = curr_operand_mode[nop];
3298  curr_operand_mode[nop] = curr_operand_mode[nop + 1];
3299  curr_operand_mode[nop + 1] = mode;
3300  mode = original_subreg_reg_mode[nop];
3301  original_subreg_reg_mode[nop] = original_subreg_reg_mode[nop + 1];
3302  original_subreg_reg_mode[nop + 1] = mode;
3303  rtx x = *curr_id->operand_loc[nop];
3304  *curr_id->operand_loc[nop] = *curr_id->operand_loc[nop + 1];
3305  *curr_id->operand_loc[nop + 1] = x;
3306  /* Swap the duplicates too.  */
3307  lra_update_dup (curr_id, nop);
3308  lra_update_dup (curr_id, nop + 1);
3309}
3310
3311/* Main entry point of the constraint code: search the body of the
3312   current insn to choose the best alternative.  It is mimicking insn
3313   alternative cost calculation model of former reload pass.  That is
3314   because machine descriptions were written to use this model.  This
3315   model can be changed in future.  Make commutative operand exchange
3316   if it is chosen.
3317
3318   if CHECK_ONLY_P is false, do RTL changes to satisfy the
3319   constraints.  Return true if any change happened during function
3320   call.
3321
3322   If CHECK_ONLY_P is true then don't do any transformation.  Just
3323   check that the insn satisfies all constraints.  If the insn does
3324   not satisfy any constraint, return true.  */
3325static bool
3326curr_insn_transform (bool check_only_p)
3327{
3328  int i, j, k;
3329  int n_operands;
3330  int n_alternatives;
3331  int commutative;
3332  signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
3333  signed char match_inputs[MAX_RECOG_OPERANDS + 1];
3334  rtx_insn *before, *after;
3335  bool alt_p = false;
3336  /* Flag that the insn has been changed through a transformation.  */
3337  bool change_p;
3338  bool sec_mem_p;
3339#ifdef SECONDARY_MEMORY_NEEDED
3340  bool use_sec_mem_p;
3341#endif
3342  int max_regno_before;
3343  int reused_alternative_num;
3344
3345  curr_insn_set = single_set (curr_insn);
3346  if (curr_insn_set != NULL_RTX && simple_move_p ())
3347    return false;
3348
3349  no_input_reloads_p = no_output_reloads_p = false;
3350  goal_alt_number = -1;
3351  change_p = sec_mem_p = false;
3352  /* JUMP_INSNs and CALL_INSNs are not allowed to have any output
3353     reloads; neither are insns that SET cc0.  Insns that use CC0 are
3354     not allowed to have any input reloads.  */
3355  if (JUMP_P (curr_insn) || CALL_P (curr_insn))
3356    no_output_reloads_p = true;
3357
3358#ifdef HAVE_cc0
3359  if (reg_referenced_p (cc0_rtx, PATTERN (curr_insn)))
3360    no_input_reloads_p = true;
3361  if (reg_set_p (cc0_rtx, PATTERN (curr_insn)))
3362    no_output_reloads_p = true;
3363#endif
3364
3365  n_operands = curr_static_id->n_operands;
3366  n_alternatives = curr_static_id->n_alternatives;
3367
3368  /* Just return "no reloads" if insn has no operands with
3369     constraints.  */
3370  if (n_operands == 0 || n_alternatives == 0)
3371    return false;
3372
3373  max_regno_before = max_reg_num ();
3374
3375  for (i = 0; i < n_operands; i++)
3376    {
3377      goal_alt_matched[i][0] = -1;
3378      goal_alt_matches[i] = -1;
3379    }
3380
3381  commutative = curr_static_id->commutative;
3382
3383  /* Now see what we need for pseudos that didn't get hard regs or got
3384     the wrong kind of hard reg.  For this, we must consider all the
3385     operands together against the register constraints.  */
3386
3387  best_losers = best_overall = INT_MAX;
3388  best_reload_sum = 0;
3389
3390  curr_swapped = false;
3391  goal_alt_swapped = false;
3392
3393  if (! check_only_p)
3394    /* Make equivalence substitution and memory subreg elimination
3395       before address processing because an address legitimacy can
3396       depend on memory mode.  */
3397    for (i = 0; i < n_operands; i++)
3398      {
3399	rtx op = *curr_id->operand_loc[i];
3400	rtx subst, old = op;
3401	bool op_change_p = false;
3402
3403	if (GET_CODE (old) == SUBREG)
3404	  old = SUBREG_REG (old);
3405	subst = get_equiv_with_elimination (old, curr_insn);
3406	original_subreg_reg_mode[i] = VOIDmode;
3407	if (subst != old)
3408	  {
3409	    subst = copy_rtx (subst);
3410	    lra_assert (REG_P (old));
3411	    if (GET_CODE (op) != SUBREG)
3412	      *curr_id->operand_loc[i] = subst;
3413	    else
3414	      {
3415		SUBREG_REG (op) = subst;
3416		if (GET_MODE (subst) == VOIDmode)
3417		  original_subreg_reg_mode[i] = GET_MODE (old);
3418	      }
3419	    if (lra_dump_file != NULL)
3420	      {
3421		fprintf (lra_dump_file,
3422			 "Changing pseudo %d in operand %i of insn %u on equiv ",
3423			 REGNO (old), i, INSN_UID (curr_insn));
3424		dump_value_slim (lra_dump_file, subst, 1);
3425		fprintf (lra_dump_file, "\n");
3426	      }
3427	    op_change_p = change_p = true;
3428	  }
3429	if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
3430	  {
3431	    change_p = true;
3432	    lra_update_dup (curr_id, i);
3433	  }
3434      }
3435
3436  /* Reload address registers and displacements.  We do it before
3437     finding an alternative because of memory constraints.  */
3438  before = after = NULL;
3439  for (i = 0; i < n_operands; i++)
3440    if (! curr_static_id->operand[i].is_operator
3441	&& process_address (i, check_only_p, &before, &after))
3442      {
3443	if (check_only_p)
3444	  return true;
3445	change_p = true;
3446	lra_update_dup (curr_id, i);
3447      }
3448
3449  if (change_p)
3450    /* If we've changed the instruction then any alternative that
3451       we chose previously may no longer be valid.  */
3452    lra_set_used_insn_alternative (curr_insn, -1);
3453
3454  if (! check_only_p && curr_insn_set != NULL_RTX
3455      && check_and_process_move (&change_p, &sec_mem_p))
3456    return change_p;
3457
3458 try_swapped:
3459
3460  reused_alternative_num = check_only_p ? -1 : curr_id->used_insn_alternative;
3461  if (lra_dump_file != NULL && reused_alternative_num >= 0)
3462    fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
3463	     reused_alternative_num, INSN_UID (curr_insn));
3464
3465  if (process_alt_operands (reused_alternative_num))
3466    alt_p = true;
3467
3468  if (check_only_p)
3469    return ! alt_p || best_losers != 0;
3470
3471  /* If insn is commutative (it's safe to exchange a certain pair of
3472     operands) then we need to try each alternative twice, the second
3473     time matching those two operands as if we had exchanged them.  To
3474     do this, really exchange them in operands.
3475
3476     If we have just tried the alternatives the second time, return
3477     operands to normal and drop through.  */
3478
3479  if (reused_alternative_num < 0 && commutative >= 0)
3480    {
3481      curr_swapped = !curr_swapped;
3482      if (curr_swapped)
3483	{
3484	  swap_operands (commutative);
3485	  goto try_swapped;
3486	}
3487      else
3488	swap_operands (commutative);
3489    }
3490
3491  if (! alt_p && ! sec_mem_p)
3492    {
3493      /* No alternative works with reloads??  */
3494      if (INSN_CODE (curr_insn) >= 0)
3495	fatal_insn ("unable to generate reloads for:", curr_insn);
3496      error_for_asm (curr_insn,
3497		     "inconsistent operand constraints in an %<asm%>");
3498      /* Avoid further trouble with this insn.	*/
3499      PATTERN (curr_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3500      lra_invalidate_insn_data (curr_insn);
3501      return true;
3502    }
3503
3504  /* If the best alternative is with operands 1 and 2 swapped, swap
3505     them.  Update the operand numbers of any reloads already
3506     pushed.  */
3507
3508  if (goal_alt_swapped)
3509    {
3510      if (lra_dump_file != NULL)
3511	fprintf (lra_dump_file, "  Commutative operand exchange in insn %u\n",
3512		 INSN_UID (curr_insn));
3513
3514      /* Swap the duplicates too.  */
3515      swap_operands (commutative);
3516      change_p = true;
3517    }
3518
3519#ifdef SECONDARY_MEMORY_NEEDED
3520  /* Some target macros SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
3521     too conservatively.  So we use the secondary memory only if there
3522     is no any alternative without reloads.  */
3523  use_sec_mem_p = false;
3524  if (! alt_p)
3525    use_sec_mem_p = true;
3526  else if (sec_mem_p)
3527    {
3528      for (i = 0; i < n_operands; i++)
3529	if (! goal_alt_win[i] && ! goal_alt_match_win[i])
3530	  break;
3531      use_sec_mem_p = i < n_operands;
3532    }
3533
3534  if (use_sec_mem_p)
3535    {
3536      rtx new_reg, src, dest, rld;
3537      machine_mode sec_mode, rld_mode;
3538
3539      lra_assert (sec_mem_p);
3540      lra_assert (curr_static_id->operand[0].type == OP_OUT
3541		  && curr_static_id->operand[1].type == OP_IN);
3542      dest = *curr_id->operand_loc[0];
3543      src = *curr_id->operand_loc[1];
3544      rld = (GET_MODE_SIZE (GET_MODE (dest)) <= GET_MODE_SIZE (GET_MODE (src))
3545	     ? dest : src);
3546      rld_mode = GET_MODE (rld);
3547#ifdef SECONDARY_MEMORY_NEEDED_MODE
3548      sec_mode = SECONDARY_MEMORY_NEEDED_MODE (rld_mode);
3549#else
3550      sec_mode = rld_mode;
3551#endif
3552      new_reg = lra_create_new_reg (sec_mode, NULL_RTX,
3553				    NO_REGS, "secondary");
3554      /* If the mode is changed, it should be wider.  */
3555      lra_assert (GET_MODE_SIZE (sec_mode) >= GET_MODE_SIZE (rld_mode));
3556      if (sec_mode != rld_mode)
3557        {
3558	  /* If the target says specifically to use another mode for
3559	     secondary memory moves we can not reuse the original
3560	     insn.  */
3561	  after = emit_spill_move (false, new_reg, dest);
3562	  lra_process_new_insns (curr_insn, NULL, after,
3563				 "Inserting the sec. move");
	  /* We may have non null BEFORE here (e.g. after address
	     processing).  */
3566	  push_to_sequence (before);
3567	  before = emit_spill_move (true, new_reg, src);
3568	  emit_insn (before);
3569	  before = get_insns ();
3570	  end_sequence ();
3571	  lra_process_new_insns (curr_insn, before, NULL, "Changing on");
3572	  lra_set_insn_deleted (curr_insn);
3573	}
3574      else if (dest == rld)
3575        {
3576	  *curr_id->operand_loc[0] = new_reg;
3577	  after = emit_spill_move (false, new_reg, dest);
3578	  lra_process_new_insns (curr_insn, NULL, after,
3579				 "Inserting the sec. move");
3580	}
3581      else
3582	{
3583	  *curr_id->operand_loc[1] = new_reg;
3584	  /* See comments above.  */
3585	  push_to_sequence (before);
3586	  before = emit_spill_move (true, new_reg, src);
3587	  emit_insn (before);
3588	  before = get_insns ();
3589	  end_sequence ();
3590	  lra_process_new_insns (curr_insn, before, NULL,
3591				 "Inserting the sec. move");
3592	}
3593      lra_update_insn_regno_info (curr_insn);
3594      return true;
3595    }
3596#endif
3597
3598  lra_assert (goal_alt_number >= 0);
3599  lra_set_used_insn_alternative (curr_insn, goal_alt_number);
3600
3601  if (lra_dump_file != NULL)
3602    {
3603      const char *p;
3604
3605      fprintf (lra_dump_file, "	 Choosing alt %d in insn %u:",
3606	       goal_alt_number, INSN_UID (curr_insn));
3607      for (i = 0; i < n_operands; i++)
3608	{
3609	  p = (curr_static_id->operand_alternative
3610	       [goal_alt_number * n_operands + i].constraint);
3611	  if (*p == '\0')
3612	    continue;
3613	  fprintf (lra_dump_file, "  (%d) ", i);
3614	  for (; *p != '\0' && *p != ',' && *p != '#'; p++)
3615	    fputc (*p, lra_dump_file);
3616	}
3617      if (INSN_CODE (curr_insn) >= 0
3618          && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
3619        fprintf (lra_dump_file, " {%s}", p);
3620      if (curr_id->sp_offset != 0)
3621        fprintf (lra_dump_file, " (sp_off=%" HOST_WIDE_INT_PRINT "d)",
3622		 curr_id->sp_offset);
3623       fprintf (lra_dump_file, "\n");
3624    }
3625
3626  /* Right now, for any pair of operands I and J that are required to
3627     match, with J < I, goal_alt_matches[I] is J.  Add I to
3628     goal_alt_matched[J].  */
3629
3630  for (i = 0; i < n_operands; i++)
3631    if ((j = goal_alt_matches[i]) >= 0)
3632      {
3633	for (k = 0; goal_alt_matched[j][k] >= 0; k++)
3634	  ;
3635	/* We allow matching one output operand and several input
3636	   operands.  */
3637	lra_assert (k == 0
3638		    || (curr_static_id->operand[j].type == OP_OUT
3639			&& curr_static_id->operand[i].type == OP_IN
3640			&& (curr_static_id->operand
3641			    [goal_alt_matched[j][0]].type == OP_IN)));
3642	goal_alt_matched[j][k] = i;
3643	goal_alt_matched[j][k + 1] = -1;
3644      }
3645
3646  for (i = 0; i < n_operands; i++)
3647    goal_alt_win[i] |= goal_alt_match_win[i];
3648
3649  /* Any constants that aren't allowed and can't be reloaded into
3650     registers are here changed into memory references.	 */
3651  for (i = 0; i < n_operands; i++)
3652    if (goal_alt_win[i])
3653      {
3654	int regno;
3655	enum reg_class new_class;
3656	rtx reg = *curr_id->operand_loc[i];
3657
3658	if (GET_CODE (reg) == SUBREG)
3659	  reg = SUBREG_REG (reg);
3660
3661	if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
3662	  {
3663	    bool ok_p = in_class_p (reg, goal_alt[i], &new_class);
3664
3665	    if (new_class != NO_REGS && get_reg_class (regno) != new_class)
3666	      {
3667		lra_assert (ok_p);
3668		lra_change_class (regno, new_class, "      Change to", true);
3669	      }
3670	  }
3671      }
3672    else
3673      {
3674	const char *constraint;
3675	char c;
3676	rtx op = *curr_id->operand_loc[i];
3677	rtx subreg = NULL_RTX;
3678	machine_mode mode = curr_operand_mode[i];
3679
3680	if (GET_CODE (op) == SUBREG)
3681	  {
3682	    subreg = op;
3683	    op = SUBREG_REG (op);
3684	    mode = GET_MODE (op);
3685	  }
3686
3687	if (CONST_POOL_OK_P (mode, op)
3688	    && ((targetm.preferred_reload_class
3689		 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
3690		|| no_input_reloads_p))
3691	  {
3692	    rtx tem = force_const_mem (mode, op);
3693
3694	    change_p = true;
3695	    if (subreg != NULL_RTX)
3696	      tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
3697
3698	    *curr_id->operand_loc[i] = tem;
3699	    lra_update_dup (curr_id, i);
3700	    process_address (i, false, &before, &after);
3701
3702	    /* If the alternative accepts constant pool refs directly
3703	       there will be no reload needed at all.  */
3704	    if (subreg != NULL_RTX)
3705	      continue;
3706	    /* Skip alternatives before the one requested.  */
3707	    constraint = (curr_static_id->operand_alternative
3708			  [goal_alt_number * n_operands + i].constraint);
3709	    for (;
3710		 (c = *constraint) && c != ',' && c != '#';
3711		 constraint += CONSTRAINT_LEN (c, constraint))
3712	      {
3713		enum constraint_num cn = lookup_constraint (constraint);
3714		if (insn_extra_memory_constraint (cn)
3715		    && satisfies_memory_constraint_p (tem, cn))
3716		  break;
3717	      }
3718	    if (c == '\0' || c == ',' || c == '#')
3719	      continue;
3720
3721	    goal_alt_win[i] = true;
3722	  }
3723      }
3724
3725  for (i = 0; i < n_operands; i++)
3726    {
3727      int regno;
3728      bool optional_p = false;
3729      rtx old, new_reg;
3730      rtx op = *curr_id->operand_loc[i];
3731
3732      if (goal_alt_win[i])
3733	{
3734	  if (goal_alt[i] == NO_REGS
3735	      && REG_P (op)
3736	      /* When we assign NO_REGS it means that we will not
3737		 assign a hard register to the scratch pseudo by
3738		 assigment pass and the scratch pseudo will be
3739		 spilled.  Spilled scratch pseudos are transformed
3740		 back to scratches at the LRA end.  */
3741	      && lra_former_scratch_operand_p (curr_insn, i)
3742	      && lra_former_scratch_p (REGNO (op)))
3743	    {
3744	      int regno = REGNO (op);
3745	      lra_change_class (regno, NO_REGS, "      Change to", true);
3746	      if (lra_get_regno_hard_regno (regno) >= 0)
3747		/* We don't have to mark all insn affected by the
3748		   spilled pseudo as there is only one such insn, the
3749		   current one.  */
3750		reg_renumber[regno] = -1;
3751	      lra_assert (bitmap_single_bit_set_p
3752			  (&lra_reg_info[REGNO (op)].insn_bitmap));
3753	    }
3754	  /* We can do an optional reload.  If the pseudo got a hard
3755	     reg, we might improve the code through inheritance.  If
3756	     it does not get a hard register we coalesce memory/memory
3757	     moves later.  Ignore move insns to avoid cycling.  */
3758	  if (! lra_simple_p
3759	      && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
3760	      && goal_alt[i] != NO_REGS && REG_P (op)
3761	      && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
3762	      && regno < new_regno_start
3763	      && ! lra_former_scratch_p (regno)
3764	      && reg_renumber[regno] < 0
3765	      /* Check that the optional reload pseudo will be able to
3766		 hold given mode value.  */
3767	      && ! (prohibited_class_reg_set_mode_p
3768		    (goal_alt[i], reg_class_contents[goal_alt[i]],
3769		     PSEUDO_REGNO_MODE (regno)))
3770	      && (curr_insn_set == NULL_RTX
3771		  || !((REG_P (SET_SRC (curr_insn_set))
3772			|| MEM_P (SET_SRC (curr_insn_set))
3773			|| GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
3774		       && (REG_P (SET_DEST (curr_insn_set))
3775			   || MEM_P (SET_DEST (curr_insn_set))
3776			   || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
3777	    optional_p = true;
3778	  else
3779	    continue;
3780	}
3781
3782      /* Operands that match previous ones have already been handled.  */
3783      if (goal_alt_matches[i] >= 0)
3784	continue;
3785
3786      /* We should not have an operand with a non-offsettable address
3787	 appearing where an offsettable address will do.  It also may
3788	 be a case when the address should be special in other words
3789	 not a general one (e.g. it needs no index reg).  */
3790      if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
3791	{
3792	  enum reg_class rclass;
3793	  rtx *loc = &XEXP (op, 0);
3794	  enum rtx_code code = GET_CODE (*loc);
3795
3796	  push_to_sequence (before);
3797	  rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
3798				   MEM, SCRATCH);
3799	  if (GET_RTX_CLASS (code) == RTX_AUTOINC)
3800	    new_reg = emit_inc (rclass, *loc, *loc,
3801				/* This value does not matter for MODIFY.  */
3802				GET_MODE_SIZE (GET_MODE (op)));
3803	  else if (get_reload_reg (OP_IN, Pmode, *loc, rclass, FALSE,
3804				   "offsetable address", &new_reg))
3805	    lra_emit_move (new_reg, *loc);
3806	  before = get_insns ();
3807	  end_sequence ();
3808	  *loc = new_reg;
3809	  lra_update_dup (curr_id, i);
3810	}
3811      else if (goal_alt_matched[i][0] == -1)
3812	{
3813	  machine_mode mode;
3814	  rtx reg, *loc;
3815	  int hard_regno, byte;
3816	  enum op_type type = curr_static_id->operand[i].type;
3817
3818	  loc = curr_id->operand_loc[i];
3819	  mode = curr_operand_mode[i];
3820	  if (GET_CODE (*loc) == SUBREG)
3821	    {
3822	      reg = SUBREG_REG (*loc);
3823	      byte = SUBREG_BYTE (*loc);
3824	      if (REG_P (reg)
3825		  /* Strict_low_part requires reload the register not
3826		     the sub-register.	*/
3827		  && (curr_static_id->operand[i].strict_low
3828		      || (GET_MODE_SIZE (mode)
3829			  <= GET_MODE_SIZE (GET_MODE (reg))
3830			  && (hard_regno
3831			      = get_try_hard_regno (REGNO (reg))) >= 0
3832			  && (simplify_subreg_regno
3833			      (hard_regno,
3834			       GET_MODE (reg), byte, mode) < 0)
3835			  && (goal_alt[i] == NO_REGS
3836			      || (simplify_subreg_regno
3837				  (ira_class_hard_regs[goal_alt[i]][0],
3838				   GET_MODE (reg), byte, mode) >= 0)))))
3839		{
3840		  if (type == OP_OUT)
3841		    type = OP_INOUT;
3842		  loc = &SUBREG_REG (*loc);
3843		  mode = GET_MODE (*loc);
3844		}
3845	    }
3846	  old = *loc;
3847	  if (get_reload_reg (type, mode, old, goal_alt[i],
3848			      loc != curr_id->operand_loc[i], "", &new_reg)
3849	      && type != OP_OUT)
3850	    {
3851	      push_to_sequence (before);
3852	      lra_emit_move (new_reg, old);
3853	      before = get_insns ();
3854	      end_sequence ();
3855	    }
3856	  *loc = new_reg;
3857	  if (type != OP_IN
3858	      && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
3859	    {
3860	      start_sequence ();
3861	      lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
3862	      emit_insn (after);
3863	      after = get_insns ();
3864	      end_sequence ();
3865	      *loc = new_reg;
3866	    }
3867	  for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
3868	    if (goal_alt_dont_inherit_ops[j] == i)
3869	      {
3870		lra_set_regno_unique_value (REGNO (new_reg));
3871		break;
3872	      }
3873	  lra_update_dup (curr_id, i);
3874	}
3875      else if (curr_static_id->operand[i].type == OP_IN
3876	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
3877		   == OP_OUT))
3878	{
3879	  /* generate reloads for input and matched outputs.  */
3880	  match_inputs[0] = i;
3881	  match_inputs[1] = -1;
3882	  match_reload (goal_alt_matched[i][0], match_inputs,
3883			goal_alt[i], &before, &after);
3884	}
3885      else if (curr_static_id->operand[i].type == OP_OUT
3886	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
3887		   == OP_IN))
3888	/* Generate reloads for output and matched inputs.  */
3889	match_reload (i, goal_alt_matched[i], goal_alt[i], &before, &after);
3890      else if (curr_static_id->operand[i].type == OP_IN
3891	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
3892		   == OP_IN))
3893	{
3894	  /* Generate reloads for matched inputs.  */
3895	  match_inputs[0] = i;
3896	  for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
3897	    match_inputs[j + 1] = k;
3898	  match_inputs[j + 1] = -1;
3899	  match_reload (-1, match_inputs, goal_alt[i], &before, &after);
3900	}
3901      else
3902	/* We must generate code in any case when function
3903	   process_alt_operands decides that it is possible.  */
3904	gcc_unreachable ();
3905      if (optional_p)
3906	{
3907	  lra_assert (REG_P (op));
3908	  regno = REGNO (op);
3909	  op = *curr_id->operand_loc[i]; /* Substitution.  */
3910	  if (GET_CODE (op) == SUBREG)
3911	    op = SUBREG_REG (op);
3912	  gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
3913	  bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
3914	  lra_reg_info[REGNO (op)].restore_regno = regno;
3915	  if (lra_dump_file != NULL)
3916	    fprintf (lra_dump_file,
3917		     "      Making reload reg %d for reg %d optional\n",
3918		     REGNO (op), regno);
3919	}
3920    }
3921  if (before != NULL_RTX || after != NULL_RTX
3922      || max_regno_before != max_reg_num ())
3923    change_p = true;
3924  if (change_p)
3925    {
3926      lra_update_operator_dups (curr_id);
3927      /* Something changes -- process the insn.	 */
3928      lra_update_insn_regno_info (curr_insn);
3929    }
3930  lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
3931  return change_p;
3932}
3933
3934/* Return true if INSN satisfies all constraints.  In other words, no
3935   reload insns are needed.  */
3936bool
3937lra_constrain_insn (rtx_insn *insn)
3938{
3939  int saved_new_regno_start = new_regno_start;
3940  int saved_new_insn_uid_start = new_insn_uid_start;
3941  bool change_p;
3942
3943  curr_insn = insn;
3944  curr_id = lra_get_insn_recog_data (curr_insn);
3945  curr_static_id = curr_id->insn_static_data;
3946  new_insn_uid_start = get_max_uid ();
3947  new_regno_start = max_reg_num ();
3948  change_p = curr_insn_transform (true);
3949  new_regno_start = saved_new_regno_start;
3950  new_insn_uid_start = saved_new_insn_uid_start;
3951  return ! change_p;
3952}
3953
3954/* Return true if X is in LIST.	 */
3955static bool
3956in_list_p (rtx x, rtx list)
3957{
3958  for (; list != NULL_RTX; list = XEXP (list, 1))
3959    if (XEXP (list, 0) == x)
3960      return true;
3961  return false;
3962}
3963
3964/* Return true if X contains an allocatable hard register (if
3965   HARD_REG_P) or a (spilled if SPILLED_P) pseudo.  */
3966static bool
3967contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
3968{
3969  int i, j;
3970  const char *fmt;
3971  enum rtx_code code;
3972
3973  code = GET_CODE (x);
3974  if (REG_P (x))
3975    {
3976      int regno = REGNO (x);
3977      HARD_REG_SET alloc_regs;
3978
3979      if (hard_reg_p)
3980	{
3981	  if (regno >= FIRST_PSEUDO_REGISTER)
3982	    regno = lra_get_regno_hard_regno (regno);
3983	  if (regno < 0)
3984	    return false;
3985	  COMPL_HARD_REG_SET (alloc_regs, lra_no_alloc_regs);
3986	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
3987	}
3988      else
3989	{
3990	  if (regno < FIRST_PSEUDO_REGISTER)
3991	    return false;
3992	  if (! spilled_p)
3993	    return true;
3994	  return lra_get_regno_hard_regno (regno) < 0;
3995	}
3996    }
3997  fmt = GET_RTX_FORMAT (code);
3998  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3999    {
4000      if (fmt[i] == 'e')
4001	{
4002	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
4003	    return true;
4004	}
4005      else if (fmt[i] == 'E')
4006	{
4007	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4008	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
4009	      return true;
4010	}
4011    }
4012  return false;
4013}
4014
4015/* Return true if X contains a symbol reg.  */
4016static bool
4017contains_symbol_ref_p (rtx x)
4018{
4019  int i, j;
4020  const char *fmt;
4021  enum rtx_code code;
4022
4023  code = GET_CODE (x);
4024  if (code == SYMBOL_REF)
4025    return true;
4026  fmt = GET_RTX_FORMAT (code);
4027  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4028    {
4029      if (fmt[i] == 'e')
4030	{
4031	  if (contains_symbol_ref_p (XEXP (x, i)))
4032	    return true;
4033	}
4034      else if (fmt[i] == 'E')
4035	{
4036	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4037	    if (contains_symbol_ref_p (XVECEXP (x, i, j)))
4038	      return true;
4039	}
4040    }
4041  return false;
4042}
4043
/* Process all regs in location *LOC and change them on equivalent
   substitution.  Return true if any change was done.  Uses the
   file-scope CURR_INSN for elimination context.  */
static bool
loc_equivalence_change_p (rtx *loc)
{
  rtx subst, reg, x = *loc;
  bool result = false;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == SUBREG)
    {
      reg = SUBREG_REG (x);
      /* Only substitute inside the subreg when the equivalence is a
	 VOIDmode rtx (i.e. a constant with no machine mode).  */
      if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
	  && GET_MODE (subst) == VOIDmode)
	{
	  /* We cannot reload debug location.  Simplify subreg here
	     while we know the inner mode.  */
	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
				      GET_MODE (reg), SUBREG_BYTE (x));
	  return true;
	}
    }
  /* A plain register is replaced wholesale by its equivalence.  */
  if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
    {
      *loc = subst;
      return true;
    }

  /* Scan all the operand sub-expressions.  Note the recursive call is
     made before the '||' with RESULT so every sub-expression is
     processed even after a change was already found.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  result
	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
    }
  return result;
}
4087
4088/* Similar to loc_equivalence_change_p, but for use as
4089   simplify_replace_fn_rtx callback.  DATA is insn for which the
4090   elimination is done.  If it null we don't do the elimination.  */
4091static rtx
4092loc_equivalence_callback (rtx loc, const_rtx, void *data)
4093{
4094  if (!REG_P (loc))
4095    return NULL_RTX;
4096
4097  rtx subst = (data == NULL
4098	       ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
4099  if (subst != loc)
4100    return subst;
4101
4102  return NULL_RTX;
4103}
4104
/* Maximum number of generated reload insns per insn.  It is for
   preventing this pass from cycling in case of a bug.  */
4107#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS
4108
4109/* The current iteration number of this LRA pass.  */
4110int lra_constraint_iter;
4111
4112/* True if we substituted equiv which needs checking register
4113   allocation correctness because the equivalent value contains
4114   allocatable hard registers or when we restore multi-register
4115   pseudo.  */
4116bool lra_risky_transformations_p;
4117
4118/* Return true if REGNO is referenced in more than one block.  */
4119static bool
4120multi_block_pseudo_p (int regno)
4121{
4122  basic_block bb = NULL;
4123  unsigned int uid;
4124  bitmap_iterator bi;
4125
4126  if (regno < FIRST_PSEUDO_REGISTER)
4127    return false;
4128
4129    EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4130      if (bb == NULL)
4131	bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
4132      else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
4133	return true;
4134    return false;
4135}
4136
4137/* Return true if LIST contains a deleted insn.  */
4138static bool
4139contains_deleted_insn_p (rtx_insn_list *list)
4140{
4141  for (; list != NULL_RTX; list = list->next ())
4142    if (NOTE_P (list->insn ())
4143	&& NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
4144      return true;
4145  return false;
4146}
4147
4148/* Return true if X contains a pseudo dying in INSN.  */
4149static bool
4150dead_pseudo_p (rtx x, rtx insn)
4151{
4152  int i, j;
4153  const char *fmt;
4154  enum rtx_code code;
4155
4156  if (REG_P (x))
4157    return (insn != NULL_RTX
4158	    && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
4159  code = GET_CODE (x);
4160  fmt = GET_RTX_FORMAT (code);
4161  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4162    {
4163      if (fmt[i] == 'e')
4164	{
4165	  if (dead_pseudo_p (XEXP (x, i), insn))
4166	    return true;
4167	}
4168      else if (fmt[i] == 'E')
4169	{
4170	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4171	    if (dead_pseudo_p (XVECEXP (x, i, j), insn))
4172	      return true;
4173	}
4174    }
4175  return false;
4176}
4177
4178/* Return true if INSN contains a dying pseudo in INSN right hand
4179   side.  */
4180static bool
4181insn_rhs_dead_pseudo_p (rtx_insn *insn)
4182{
4183  rtx set = single_set (insn);
4184
4185  gcc_assert (set != NULL);
4186  return dead_pseudo_p (SET_SRC (set), insn);
4187}
4188
4189/* Return true if any init insn of REGNO contains a dying pseudo in
4190   insn right hand side.  */
4191static bool
4192init_insn_rhs_dead_pseudo_p (int regno)
4193{
4194  rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4195
4196  if (insns == NULL)
4197    return false;
4198  for (; insns != NULL_RTX; insns = insns->next ())
4199    if (insn_rhs_dead_pseudo_p (insns->insn ()))
4200      return true;
4201  return false;
4202}
4203
4204/* Return TRUE if REGNO has a reverse equivalence.  The equivalence is
4205   reverse only if we have one init insn with given REGNO as a
4206   source.  */
4207static bool
4208reverse_equiv_p (int regno)
4209{
4210  rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4211  rtx set;
4212
4213  if (insns == NULL)
4214    return false;
4215  if (! INSN_P (insns->insn ())
4216      || insns->next () != NULL)
4217    return false;
4218  if ((set = single_set (insns->insn ())) == NULL_RTX)
4219    return false;
4220  return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
4221}
4222
4223/* Return TRUE if REGNO was reloaded in an equivalence init insn.  We
4224   call this function only for non-reverse equivalence.  */
4225static bool
4226contains_reloaded_insn_p (int regno)
4227{
4228  rtx set;
4229  rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
4230
4231  for (; list != NULL; list = list->next ())
4232    if ((set = single_set (list->insn ())) == NULL_RTX
4233	|| ! REG_P (SET_DEST (set))
4234	|| (int) REGNO (SET_DEST (set)) != regno)
4235      return true;
4236  return false;
4237}
4238
/* Entry function of LRA constraint pass.  FIRST_P is true on the
   first call for the current function.  Return true if the
   constraint pass did change the code.  */
bool
lra_constraints (bool first_p)
{
  bool changed_p;
  int i, hard_regno, new_insns_num;
  unsigned int min_len, new_min_len, uid;
  rtx set, x, reg, dest_reg;
  basic_block last_bb;
  bitmap_head equiv_insn_bitmap;
  bitmap_iterator bi;

  lra_constraint_iter++;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
	     lra_constraint_iter);
  changed_p = false;
  /* Be conservative from the start when the PIC register is still an
     unallocated pseudo.  */
  if (pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    lra_risky_transformations_p = true;
  else
    lra_risky_transformations_p = false;
  new_insn_uid_start = get_max_uid ();
  new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
  /* Mark used hard regs for target stack size calculations.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      {
	int j, nregs;

	nregs = hard_regno_nregs[hard_regno][lra_reg_info[i].biggest_mode];
	for (j = 0; j < nregs; j++)
	  df_set_regs_ever_live (hard_regno + j, true);
      }
  /* Do elimination before the equivalence processing as we can spill
     some pseudos during elimination.  */
  lra_eliminate (false, first_p);
  bitmap_initialize (&equiv_insn_bitmap, &reg_obstack);
  /* Decide for each spilled pseudo whether its equivalence is still
     usable/profitable, and collect the insns referencing pseudos
     whose equivalences will be substituted.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0)
      {
	ira_reg_equiv[i].profitable_p = true;
	reg = regno_reg_rtx[i];
	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
	  {
	    bool pseudo_p = contains_reg_p (x, false, false);

	    /* After RTL transformation, we can not guarantee that
	       pseudo in the substitution was not reloaded which might
	       make equivalence invalid.  For example, in reverse
	       equiv of p0

	       p0 <- ...
	       ...
	       equiv_mem <- p0

	       the memory address register was reloaded before the 2nd
	       insn.  */
	    if ((! first_p && pseudo_p)
		/* We don't use DF for compilation speed sake.  So it
		   is problematic to update live info when we use an
		   equivalence containing pseudos in more than one
		   BB.  */
		|| (pseudo_p && multi_block_pseudo_p (i))
		/* If an init insn was deleted for some reason, cancel
		   the equiv.  We could update the equiv insns after
		   transformations including an equiv insn deletion
		   but it is not worthy as such cases are extremely
		   rare.  */
		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
		/* If it is not a reverse equivalence, we check that a
		   pseudo in rhs of the init insn is not dying in the
		   insn.  Otherwise, the live info at the beginning of
		   the corresponding BB might be wrong after we
		   removed the insn.  When the equiv can be a
		   constant, the right hand side of the init insn can
		   be a pseudo.  */
		|| (! reverse_equiv_p (i)
		    && (init_insn_rhs_dead_pseudo_p (i)
			/* If we reloaded the pseudo in an equivalence
			   init insn, we can not remove the equiv init
			   insns and the init insns might write into
			   const memory in this case.  */
			|| contains_reloaded_insn_p (i)))
		/* Prevent access beyond equivalent memory for
		   paradoxical subregs.  */
		|| (MEM_P (x)
		    && (GET_MODE_SIZE (lra_reg_info[i].biggest_mode)
			> GET_MODE_SIZE (GET_MODE (x))))
		/* NOTE(review): with a live PIC register, constants
		   that would go to the constant pool and symbol refs
		   are rejected as equivalences -- presumably because
		   materializing them needs the PIC register; confirm
		   against the target's PIC handling.  */
		|| (pic_offset_table_rtx
		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
			 && (targetm.preferred_reload_class
			     (x, lra_get_allocno_class (i)) == NO_REGS))
			|| contains_symbol_ref_p (x))))
	      ira_reg_equiv[i].defined_p = false;
	    if (contains_reg_p (x, false, true))
	      ira_reg_equiv[i].profitable_p = false;
	    if (get_equiv (reg) != reg)
	      bitmap_ior_into (&equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
	  }
      }
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    update_equiv (i);
  /* We should add all insns containing pseudos which should be
     substituted by their equivalences.  */
  EXECUTE_IF_SET_IN_BITMAP (&equiv_insn_bitmap, 0, uid, bi)
    lra_push_insn_by_uid (uid);
  min_len = lra_insn_stack_length ();
  new_insns_num = 0;
  last_bb = NULL;
  changed_p = false;
  /* Main loop: pop insns from the LRA stack and transform each until
     the stack is empty.  */
  while ((new_min_len = lra_insn_stack_length ()) != 0)
    {
      curr_insn = lra_pop_insn ();
      --new_min_len;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != last_bb)
	{
	  last_bb = curr_bb;
	  bb_reload_num = lra_curr_reload_num;
	}
      /* NEW_INSNS_NUM counts insns processed without the stack
	 shrinking below its previous minimum; exceeding the limit
	 means the pass is cycling on one insn.  */
      if (min_len > new_min_len)
	{
	  min_len = new_min_len;
	  new_insns_num = 0;
	}
      if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
	internal_error
	  ("Max. number of generated reload insns per insn is achieved (%d)\n",
	   MAX_RELOAD_INSNS_NUMBER);
      new_insns_num++;
      if (DEBUG_INSN_P (curr_insn))
	{
	  /* We need to check equivalence in debug insn and change
	     pseudo to the equivalent value if necessary.  */
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn)))
	    {
	      rtx old = *curr_id->operand_loc[0];
	      *curr_id->operand_loc[0]
		= simplify_replace_fn_rtx (old, NULL_RTX,
					   loc_equivalence_callback, curr_insn);
	      if (old != *curr_id->operand_loc[0])
		{
		  lra_update_insn_regno_info (curr_insn);
		  changed_p = true;
		}
	    }
	}
      else if (INSN_P (curr_insn))
	{
	  if ((set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dest_reg = SET_DEST (set);
	      /* The equivalence pseudo could be set up as SUBREG in a
		 case when it is a call restore insn in a mode
		 different from the pseudo mode.  */
	      if (GET_CODE (dest_reg) == SUBREG)
		dest_reg = SUBREG_REG (dest_reg);
	      if ((REG_P (dest_reg)
		   && (x = get_equiv (dest_reg)) != dest_reg
		   /* Remove insns which set up a pseudo whose value
		      can not be changed.  Such insns might be not in
		      init_insns because we don't update equiv data
		      during insn transformations.

		      As an example, let suppose that a pseudo got
		      hard register and on the 1st pass was not
		      changed to equivalent constant.  We generate an
		      additional insn setting up the pseudo because of
		      secondary memory movement.  Then the pseudo is
		      spilled and we use the equiv constant.  In this
		      case we should remove the additional insn and
		      this insn is not init_insns list.  */
		   && (! MEM_P (x) || MEM_READONLY_P (x)
		       /* Check that this is actually an insn setting
			  up the equivalence.  */
		       || in_list_p (curr_insn,
				     ira_reg_equiv
				     [REGNO (dest_reg)].init_insns)))
		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
		      && in_list_p (curr_insn,
				    ira_reg_equiv
				    [REGNO (SET_SRC (set))].init_insns)))
		{
		  /* This is equiv init insn of pseudo which did not get a
		     hard register -- remove the insn.	*/
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file,
			       "      Removing equiv init insn %i (freq=%d)\n",
			       INSN_UID (curr_insn),
			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  if (contains_reg_p (x, true, false))
		    lra_risky_transformations_p = true;
		  lra_set_insn_deleted (curr_insn);
		  continue;
		}
	    }
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  init_curr_insn_input_reloads ();
	  init_curr_operand_mode ();
	  if (curr_insn_transform (false))
	    changed_p = true;
	  /* Check non-transformed insns too for equiv change as USE
	     or CLOBBER don't need reloads but can contain pseudos
	     being changed on their equivalences.  */
	  else if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn))
		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
	    {
	      lra_update_insn_regno_info (curr_insn);
	      changed_p = true;
	    }
	}
    }
  bitmap_clear (&equiv_insn_bitmap);
  /* If we used a new hard regno, changed_p should be true because the
     hard reg is assigned to a new pseudo.  */
#ifdef ENABLE_CHECKING
  if (! changed_p)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
	if (lra_reg_info[i].nrefs != 0
	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
	  {
	    int j, nregs = hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (i)];

	    for (j = 0; j < nregs; j++)
	      lra_assert (df_regs_ever_live_p (hard_regno + j));
	  }
    }
#endif
  return changed_p;
}
4478
4479/* Initiate the LRA constraint pass.  It is done once per
4480   function.  */
void
lra_constraints_init (void)
{
  /* Nothing to do at the moment; kept as a hook for future
     per-function initialization of the constraint pass.  */
}
4485
4486/* Finalize the LRA constraint pass.  It is done once per
4487   function.  */
void
lra_constraints_finish (void)
{
  /* Nothing to do at the moment; kept as a hook for future
     per-function finalization of the constraint pass.  */
}
4492
4493
4494
4495/* This page contains code to do inheritance/split
4496   transformations.  */
4497
/* Number of reloads passed so far in current EBB.  */
static int reloads_num;

/* Number of calls passed so far in current EBB.  */
static int calls_num;

/* Generation counter used to validate elements of USAGE_INSNS: an
   entry is considered valid only when its CHECK member equals this
   value.  Bumped once per EBB so stale entries need not be
   cleared.  */
static int curr_usage_insns_check;
4507
4508/* Info about last usage of registers in EBB to do inheritance/split
4509   transformation.  Inheritance transformation is done from a spilled
4510   pseudo and split transformations from a hard register or a pseudo
4511   assigned to a hard register.	 */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  INSNS is a chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.	 And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};
4535
/* Map: regno -> corresponding pseudo usage insns.  Indexed by regno;
   entries are valid only when their CHECK member equals
   CURR_USAGE_INSNS_CHECK.  */
static struct usage_insns *usage_insns;
4538
/* Record INSN as the (currently only) usage insn of REGNO, making the
   USAGE_INSNS entry valid for the current pass.  RELOADS_NUM and the
   global CALLS_NUM are remembered for later profitability checks, and
   AFTER_P says whether a restore insn should go after INSN (splitting
   only -- see struct usage_insns).  */
static void
setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
{
  usage_insns[regno].check = curr_usage_insns_check;
  usage_insns[regno].insns = insn;
  usage_insns[regno].reloads_num = reloads_num;
  usage_insns[regno].calls_num = calls_num;
  usage_insns[regno].after_p = after_p;
}
4548
/* The function is used to form the list of REGNO usages which
   consists of optional debug insns finished by a non-debug insn using
   REGNO.  RELOADS_NUM is current number of reload insns processed so
   far.  */
static void
add_next_usage_insn (int regno, rtx insn, int reloads_num)
{
  rtx next_usage_insns;

  if (usage_insns[regno].check == curr_usage_insns_check
      && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
      && DEBUG_INSN_P (insn))
    {
      /* Check that we did not add the debug insn yet.	*/
      if (next_usage_insns != insn
	  && (GET_CODE (next_usage_insns) != INSN_LIST
	      || XEXP (next_usage_insns, 0) != insn))
	/* Prepend the debug insn to the existing chain.  */
	usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
						      next_usage_insns);
    }
  else if (NONDEBUG_INSN_P (insn))
    /* A non-debug insn restarts the chain with itself as terminator.  */
    setup_next_usage_insn (regno, insn, reloads_num, false);
  else
    /* A debug insn with no valid chain invalidates the entry.  */
    usage_insns[regno].check = 0;
}
4573
4574/* Return first non-debug insn in list USAGE_INSNS.  */
4575static rtx_insn *
4576skip_usage_debug_insns (rtx usage_insns)
4577{
4578  rtx insn;
4579
4580  /* Skip debug insns.  */
4581  for (insn = usage_insns;
4582       insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
4583       insn = XEXP (insn, 1))
4584    ;
4585  return safe_as_a <rtx_insn *> (insn);
4586}
4587
4588/* Return true if we need secondary memory moves for insn in
4589   USAGE_INSNS after inserting inherited pseudo of class INHER_CL
4590   into the insn.  */
4591static bool
4592check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
4593				 rtx usage_insns ATTRIBUTE_UNUSED)
4594{
4595#ifndef SECONDARY_MEMORY_NEEDED
4596  return false;
4597#else
4598  rtx_insn *insn;
4599  rtx set, dest;
4600  enum reg_class cl;
4601
4602  if (inher_cl == ALL_REGS
4603      || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
4604    return false;
4605  lra_assert (INSN_P (insn));
4606  if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
4607    return false;
4608  dest = SET_DEST (set);
4609  if (! REG_P (dest))
4610    return false;
4611  lra_assert (inher_cl != NO_REGS);
4612  cl = get_reg_class (REGNO (dest));
4613  return (cl != NO_REGS && cl != ALL_REGS
4614	  && SECONDARY_MEMORY_NEEDED (inher_cl, cl, GET_MODE (dest)));
4615#endif
4616}
4617
/* Registers involved in inheritance/split in the current EBB
   (inheritance/split pseudos and original registers).  Liveness is
   recalculated only for these registers in update_ebb_live_info.  */
static bitmap_head check_only_regs;
4621
4622/* Do inheritance transformations for insn INSN, which defines (if
4623   DEF_P) or uses ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which
4624   instruction in the EBB next uses ORIGINAL_REGNO; it has the same
4625   form as the "insns" field of usage_insns.  Return true if we
4626   succeed in such transformation.
4627
4628   The transformations look like:
4629
4630     p <- ...		  i <- ...
4631     ...		  p <- i    (new insn)
4632     ...	     =>
4633     <- ... p ...	  <- ... i ...
4634   or
4635     ...		  i <- p    (new insn)
4636     <- ... p ...	  <- ... i ...
4637     ...	     =>
4638     <- ... p ...	  <- ... i ...
4639   where p is a spilled original pseudo and i is a new inheritance pseudo.
4640
4641
4642   The inheritance pseudo has the smallest class of two classes CL and
4643   class of ORIGINAL REGNO.  */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds new move insns, so skip it when optimizing for
     size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* A move between disjoint classes is impossible -- reject.  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, "inheritance");
  /* Emit the connecting move: original <- i for a definition,
     i <- original for a use.  */
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = get_insns ();
  end_sequence ();
  /* The move must lower to exactly one insn, otherwise inheritance is
     not profitable and is rejected.  */
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Replace the original regno by the inheritance pseudo in all
     recorded subsequent usage insns (debug insns chained through
     INSN_LIST nodes, terminated by a non-debug insn).  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false);
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   BLOCK_FOR_INSN (usage_insn)->index);
	  dump_insn_slim (lra_dump_file, usage_insn);
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
4783
4784/* Return true if we need a caller save/restore for pseudo REGNO which
4785   was assigned to a hard register.  */
4786static inline bool
4787need_for_call_save_p (int regno)
4788{
4789  lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
4790  return (usage_insns[regno].calls_num < calls_num
4791	  && (overlaps_hard_reg_set_p
4792	      ((flag_ipa_ra &&
4793		! hard_reg_set_empty_p (lra_reg_info[regno].actual_call_used_reg_set))
4794	       ? lra_reg_info[regno].actual_call_used_reg_set
4795	       : call_used_reg_set,
4796	       PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
4797	      || HARD_REGNO_CALL_PART_CLOBBERED (reg_renumber[regno],
4798						 PSEUDO_REGNO_MODE (regno))));
4799}
4800
/* Global registers occurring in the current EBB; consulted by
   need_for_split_p to restrict pseudo splitting to non-local
   pseudos.  */
static bitmap_head ebb_global_regs;
4803
4804/* Return true if we need a split for hard register REGNO or pseudo
4805   REGNO which was assigned to a hard register.
4806   POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
4807   used for reloads since the EBB end.	It is an approximation of the
4808   used hard registers in the split range.  The exact value would
4809   require expensive calculations.  If we were aggressive with
4810   splitting because of the approximation, the split pseudo will save
4811   the same hard register assignment and will be removed in the undo
4812   pass.  We still need the approximation because too aggressive
4813   splitting would result in too inaccurate cost calculation in the
4814   assignment pass because of too many generated moves which will be
4815   probably removed in the undo pass.  */
static inline bool
need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
{
  /* REGNO is either a hard register itself or a pseudo with a hard
     register assigned.  */
  int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];

  lra_assert (hard_regno >= 0);
  return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
	   /* Don't split eliminable hard registers, otherwise we can
	      split hard registers like hard frame pointer, which
	      lives on BB start/end according to DF-infrastructure,
	      when there is a pseudo assigned to the register and
	      living in the same BB.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
	   /* Don't split call clobbered hard regs living through
	      calls, otherwise we might have a check problem in the
	      assign sub-pass as in the most cases (exception is a
	      situation when lra_risky_transformations_p value is
	      true) the assign pass assumes that all pseudos living
	      through calls are assigned to call saved hard regs.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (call_used_reg_set, regno)
	       || usage_insns[regno].calls_num == calls_num)
	   /* We need at least 2 reloads to make pseudo splitting
	      profitable.  We should provide hard regno splitting in
	      any case to solve 1st insn scheduling problem when
	      moving hard register definition up might result in
	      impossibility to find hard register for reload pseudo of
	      small register class.  */
	   && (usage_insns[regno].reloads_num
	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
	   && (regno < FIRST_PSEUDO_REGISTER
	       /* For short living pseudos, spilling + inheritance can
		  be considered a substitution for splitting.
		  Therefore we do not split local pseudos.  This also
		  decreases the aggressiveness of splitting.  The
		  minimal number of references is chosen taking into
		  account that for 2 references splitting has no sense
		  as we can just spill the pseudo.  */
	       || (regno >= FIRST_PSEUDO_REGISTER
		   && lra_reg_info[regno].nrefs > 3
		   && bitmap_bit_p (&ebb_global_regs, regno))))
	  /* A pseudo living through calls always needs a split for
	     caller saving, independent of the above conditions.  */
	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
}
4861
4862/* Return class for the split pseudo created from original pseudo with
4863   ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO.	 We
4864   choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
4865   results in no secondary memory movements.  */
4866static enum reg_class
4867choose_split_class (enum reg_class allocno_class,
4868		    int hard_regno ATTRIBUTE_UNUSED,
4869		    machine_mode mode ATTRIBUTE_UNUSED)
4870{
4871#ifndef SECONDARY_MEMORY_NEEDED
4872  return allocno_class;
4873#else
4874  int i;
4875  enum reg_class cl, best_cl = NO_REGS;
4876  enum reg_class hard_reg_class ATTRIBUTE_UNUSED
4877    = REGNO_REG_CLASS (hard_regno);
4878
4879  if (! SECONDARY_MEMORY_NEEDED (allocno_class, allocno_class, mode)
4880      && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
4881    return allocno_class;
4882  for (i = 0;
4883       (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
4884       i++)
4885    if (! SECONDARY_MEMORY_NEEDED (cl, hard_reg_class, mode)
4886	&& ! SECONDARY_MEMORY_NEEDED (hard_reg_class, cl, mode)
4887	&& TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
4888	&& (best_cl == NO_REGS
4889	    || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
4890      best_cl = cl;
4891  return best_cl;
4892#endif
4893}
4894
4895/* Do split transformations for insn INSN, which defines or uses
4896   ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which instruction in
4897   the EBB next uses ORIGINAL_REGNO; it has the same form as the
4898   "insns" field of usage_insns.
4899
4900   The transformations look like:
4901
4902     p <- ...		  p <- ...
4903     ...		  s <- p    (new insn -- save)
4904     ...	     =>
4905     ...		  p <- s    (new insn -- restore)
4906     <- ... p ...	  <- ... p ...
4907   or
4908     <- ... p ...	  <- ... p ...
4909     ...		  s <- p    (new insn -- save)
4910     ...	     =>
4911     ...		  p <- s    (new insn -- restore)
4912     <- ... p ...	  <- ... p ...
4913
   where p is an original pseudo which got a hard register or a hard
4915   register and s is a new split pseudo.  The save is put before INSN
4916   if BEFORE_P is true.	 Return true if we succeed in such
4917   transformation.  */
4918static bool
4919split_reg (bool before_p, int original_regno, rtx_insn *insn,
4920	   rtx next_usage_insns)
4921{
4922  enum reg_class rclass;
4923  rtx original_reg;
4924  int hard_regno, nregs;
4925  rtx new_reg, usage_insn;
4926  rtx_insn *restore, *save;
4927  bool after_p;
4928  bool call_save_p;
4929
4930  if (original_regno < FIRST_PSEUDO_REGISTER)
4931    {
4932      rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
4933      hard_regno = original_regno;
4934      call_save_p = false;
4935      nregs = 1;
4936    }
4937  else
4938    {
4939      hard_regno = reg_renumber[original_regno];
4940      nregs = hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (original_regno)];
4941      rclass = lra_get_allocno_class (original_regno);
4942      original_reg = regno_reg_rtx[original_regno];
4943      call_save_p = need_for_call_save_p (original_regno);
4944    }
4945  original_reg = regno_reg_rtx[original_regno];
4946  lra_assert (hard_regno >= 0);
4947  if (lra_dump_file != NULL)
4948    fprintf (lra_dump_file,
4949	     "	  ((((((((((((((((((((((((((((((((((((((((((((((((\n");
4950  if (call_save_p)
4951    {
4952      machine_mode mode = GET_MODE (original_reg);
4953
4954      mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
4955					  hard_regno_nregs[hard_regno][mode],
4956					  mode);
4957      new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, "save");
4958    }
4959  else
4960    {
4961      rclass = choose_split_class (rclass, hard_regno,
4962				   GET_MODE (original_reg));
4963      if (rclass == NO_REGS)
4964	{
4965	  if (lra_dump_file != NULL)
4966	    {
4967	      fprintf (lra_dump_file,
4968		       "    Rejecting split of %d(%s): "
4969		       "no good reg class for %d(%s)\n",
4970		       original_regno,
4971		       reg_class_names[lra_get_allocno_class (original_regno)],
4972		       hard_regno,
4973		       reg_class_names[REGNO_REG_CLASS (hard_regno)]);
4974	      fprintf
4975		(lra_dump_file,
4976		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
4977	    }
4978	  return false;
4979	}
4980      new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
4981				    rclass, "split");
4982      reg_renumber[REGNO (new_reg)] = hard_regno;
4983    }
4984  save = emit_spill_move (true, new_reg, original_reg);
4985  if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
4986    {
4987      if (lra_dump_file != NULL)
4988	{
4989	  fprintf
4990	    (lra_dump_file,
4991	     "	  Rejecting split %d->%d resulting in > 2 save insns:\n",
4992	     original_regno, REGNO (new_reg));
4993	  dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
4994	  fprintf (lra_dump_file,
4995		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
4996	}
4997      return false;
4998    }
4999  restore = emit_spill_move (false, new_reg, original_reg);
5000  if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
5001    {
5002      if (lra_dump_file != NULL)
5003	{
5004	  fprintf (lra_dump_file,
5005		   "	Rejecting split %d->%d "
5006		   "resulting in > 2 restore insns:\n",
5007		   original_regno, REGNO (new_reg));
5008	  dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
5009	  fprintf (lra_dump_file,
5010		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
5011	}
5012      return false;
5013    }
5014  after_p = usage_insns[original_regno].after_p;
5015  lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
5016  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
5017  bitmap_set_bit (&check_only_regs, original_regno);
5018  bitmap_set_bit (&lra_split_regs, REGNO (new_reg));
5019  for (;;)
5020    {
5021      if (GET_CODE (next_usage_insns) != INSN_LIST)
5022	{
5023	  usage_insn = next_usage_insns;
5024	  break;
5025	}
5026      usage_insn = XEXP (next_usage_insns, 0);
5027      lra_assert (DEBUG_INSN_P (usage_insn));
5028      next_usage_insns = XEXP (next_usage_insns, 1);
5029      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false);
5030      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
5031      if (lra_dump_file != NULL)
5032	{
5033	  fprintf (lra_dump_file, "    Split reuse change %d->%d:\n",
5034		   original_regno, REGNO (new_reg));
5035	  dump_insn_slim (lra_dump_file, usage_insn);
5036	}
5037    }
5038  lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
5039  lra_assert (usage_insn != insn || (after_p && before_p));
5040  lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
5041			 after_p ? NULL : restore,
5042			 after_p ? restore : NULL,
5043			 call_save_p
5044			 ?  "Add reg<-save" : "Add reg<-split");
5045  lra_process_new_insns (insn, before_p ? save : NULL,
5046			 before_p ? NULL : save,
5047			 call_save_p
5048			 ?  "Add save<-reg" : "Add split<-reg");
5049  if (nregs > 1)
5050    /* If we are trying to split multi-register.  We should check
5051       conflicts on the next assignment sub-pass.  IRA can allocate on
5052       sub-register levels, LRA do this on pseudos level right now and
5053       this discrepancy may create allocation conflicts after
5054       splitting.  */
5055    lra_risky_transformations_p = true;
5056  if (lra_dump_file != NULL)
5057    fprintf (lra_dump_file,
5058	     "	  ))))))))))))))))))))))))))))))))))))))))))))))))\n");
5059  return true;
5060}
5061
5062/* Recognize that we need a split transformation for insn INSN, which
5063   defines or uses REGNO in its insn biggest MODE (we use it only if
5064   REGNO is a hard register).  POTENTIAL_RELOAD_HARD_REGS contains
5065   hard registers which might be used for reloads since the EBB end.
   Put the save before INSN if BEFORE_P is true.  MAX_UID is the maximal
5067   uid before starting INSN processing.  Return true if we succeed in
5068   such transformation.  */
5069static bool
5070split_if_necessary (int regno, machine_mode mode,
5071		    HARD_REG_SET potential_reload_hard_regs,
5072		    bool before_p, rtx_insn *insn, int max_uid)
5073{
5074  bool res = false;
5075  int i, nregs = 1;
5076  rtx next_usage_insns;
5077
5078  if (regno < FIRST_PSEUDO_REGISTER)
5079    nregs = hard_regno_nregs[regno][mode];
5080  for (i = 0; i < nregs; i++)
5081    if (usage_insns[regno + i].check == curr_usage_insns_check
5082	&& (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
5083	/* To avoid processing the register twice or more.  */
5084	&& ((GET_CODE (next_usage_insns) != INSN_LIST
5085	     && INSN_UID (next_usage_insns) < max_uid)
5086	    || (GET_CODE (next_usage_insns) == INSN_LIST
5087		&& (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
5088	&& need_for_split_p (potential_reload_hard_regs, regno + i)
5089	&& split_reg (before_p, regno + i, insn, next_usage_insns))
5090    res = true;
5091  return res;
5092}
5093
/* Subset of CHECK_ONLY_REGS that is live at the current program point
   during the backward scan of the current EBB (see
   update_ebb_live_info).  */
static bitmap_head live_regs;
5097
5098/* Update live info in EBB given by its HEAD and TAIL insns after
5099   inheritance/split transformation.  The function removes dead moves
5100   too.	 */
static void
update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
{
  unsigned int j;
  int i, regno;
  bool live_p;
  rtx_insn *prev_insn;
  rtx set;
  bool remove_p;
  basic_block last_bb, prev_bb, curr_bb;
  bitmap_iterator bi;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  last_bb = BLOCK_FOR_INSN (tail);
  prev_bb = NULL;
  /* Backward scan over the EBB; CURR_INSN is a file-scope global.  */
  for (curr_insn = tail;
       curr_insn != PREV_INSN (head);
       curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      /* We need to process empty blocks too.  They contain
	 NOTE_INSN_BASIC_BLOCK referring for the basic block.  */
      if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
	continue;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != prev_bb)
	{
	  /* Crossing a block boundary: propagate the recomputed
	     liveness of CHECK_ONLY_REGS into the DF info.  */
	  if (prev_bb != NULL)
	    {
	      /* Update df_get_live_in (prev_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		if (bitmap_bit_p (&live_regs, j))
		  bitmap_set_bit (df_get_live_in (prev_bb), j);
		else
		  bitmap_clear_bit (df_get_live_in (prev_bb), j);
	    }
	  if (curr_bb != last_bb)
	    {
	      /* Update df_get_live_out (curr_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		{
		  live_p = bitmap_bit_p (&live_regs, j);
		  if (! live_p)
		    /* A reg dead here may still be live into a
		       successor outside the EBB.  */
		    FOR_EACH_EDGE (e, ei, curr_bb->succs)
		      if (bitmap_bit_p (df_get_live_in (e->dest), j))
			{
			  live_p = true;
			  break;
			}
		  if (live_p)
		    bitmap_set_bit (df_get_live_out (curr_bb), j);
		  else
		    bitmap_clear_bit (df_get_live_out (curr_bb), j);
		}
	    }
	  prev_bb = curr_bb;
	  bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
	}
      if (! NONDEBUG_INSN_P (curr_insn))
	continue;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      remove_p = false;
      /* A single set of a tracked pseudo that is dead after this insn
	 is a candidate for removal (checked again below after the
	 kill/gen update).  */
      if ((set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	  && SET_DEST (set) != pic_offset_table_rtx
	  && bitmap_bit_p (&check_only_regs, regno)
	  && ! bitmap_bit_p (&live_regs, regno))
	remove_p = true;
      /* See which defined values die here.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make clobbered argument hard registers die.  Clobbered arg
	   hard regs appear in ARG_HARD_REGS offset by
	   FIRST_PSEUDO_REGISTER -- hence the subtraction.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
      /* Mark each used value as live.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make used argument hard registers live.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno < FIRST_PSEUDO_REGISTER
	      && bitmap_bit_p (&check_only_regs, regno))
	    bitmap_set_bit (&live_regs, regno);
      /* It is quite important to remove dead move insns because it
	 means removing dead store.  We don't need to process them for
	 constraints.  */
      if (remove_p)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	    Removing dead insn:\n ");
	      dump_insn_slim (lra_dump_file, curr_insn);
	    }
	  lra_set_insn_deleted (curr_insn);
	}
    }
}
5214
5215/* The structure describes info to do an inheritance for the current
5216   insn.  We need to collect such info first before doing the
5217   transformations because the transformations change the insn
5218   internal representation.  */
struct to_inherit
{
  /* Original regno to inherit from.  */
  int regno;
  /* Subsequent insns which can inherit original reg value (same form
     as the "insns" member of struct usage_insns).  */
  rtx insns;
};
5226
/* Array containing all info for doing inheritance from the current
   insn.  */
static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];

/* Number of elements in the previous array.  */
static int to_inherit_num;
5233
5234/* Add inheritance info REGNO and INSNS. Their meaning is described in
5235   structure to_inherit.  */
5236static void
5237add_to_inherit (int regno, rtx insns)
5238{
5239  int i;
5240
5241  for (i = 0; i < to_inherit_num; i++)
5242    if (to_inherit[i].regno == regno)
5243      return;
5244  lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
5245  to_inherit[to_inherit_num].regno = regno;
5246  to_inherit[to_inherit_num++].insns = insns;
5247}
5248
5249/* Return the last non-debug insn in basic block BB, or the block begin
5250   note if none.  */
static rtx_insn *
get_last_insertion_point (basic_block bb)
{
  rtx_insn *insn;

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
      return insn;
  /* Every block contains at least its basic block note, so the loop
     above must return.  */
  gcc_unreachable ();
}
5261
5262/* Set up RES by registers living on edges FROM except the edge (FROM,
5263   TO) or by registers set up in a jump insn in BB FROM.  */
static void
get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
{
  rtx_insn *last;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  lra_assert (to != NULL);
  bitmap_clear (res);
  /* Union the live-in sets of all successors of FROM other than TO.  */
  FOR_EACH_EDGE (e, ei, from->succs)
    if (e->dest != to)
      bitmap_ior_into (res, df_get_live_in (e->dest));
  /* Also include registers set by a jump insn ending FROM.  */
  last = get_last_insertion_point (from);
  if (! JUMP_P (last))
    return;
  curr_id = lra_get_insn_recog_data (last);
  for (reg = curr_id->regs; reg != NULL; reg = reg->next)
    if (reg->type != OP_IN)
      bitmap_set_bit (res, reg->regno);
}
5285
/* Used as a temporary result of some bitmap calculations.  */
static bitmap_head temp_bitmap;

/* We split for reloads of small class of hard regs.  The following
   defines how many hard regs the class should have to be qualified as
   small.  The code is mostly oriented to x86/x86-64 architecture
   where some insns need to use only specific register or pair of
   registers and these register can live in RTL explicitly, e.g. for
   parameter passing.  */
static const int max_small_class_regs_num = 2;
5296
/* Do inheritance/split transformations in EBB starting with HEAD and
   finishing on TAIL.  We process EBB insns in the reverse order.
   Return true if we did any inheritance/split transformation in the
   EBB.

   We should avoid excessive splitting which results in worse code
   because of inaccurate cost calculations for spilling new split
   pseudos in such case.  To achieve this we do splitting only if
   register pressure is high in given basic block and there are reload
   pseudos requiring hard registers.  We could do more register
   pressure calculations at any given program point to avoid
   unnecessary splitting even more but it is too expensive and the
   current approach works well enough.  */
static bool
inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
{
  int i, src_regno, dst_regno, nregs;
  bool change_p, succ_p, update_reloads_num_p;
  rtx_insn *prev_insn, *last_insn;
  rtx next_usage_insns, set;
  enum reg_class cl;
  struct lra_insn_reg *reg;
  basic_block last_processed_bb, curr_bb = NULL;
  HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
  bitmap to_process;
  unsigned int j;
  bitmap_iterator bi;
  bool head_p, after_p;

  change_p = false;
  curr_usage_insns_check++;
  reloads_num = calls_num = 0;
  bitmap_clear (&check_only_regs);
  last_processed_bb = NULL;
  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
  COPY_HARD_REG_SET (live_hard_regs, eliminable_regset);
  IOR_HARD_REG_SET (live_hard_regs, lra_no_alloc_regs);
  /* We don't process new insns generated in the loop.	*/
  for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      if (BLOCK_FOR_INSN (curr_insn) != NULL)
	curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (last_processed_bb != curr_bb)
	{
	  /* We are at the end of BB.  Add qualified living
	     pseudos for potential splitting.  */
	  to_process = df_get_live_out (curr_bb);
	  if (last_processed_bb != NULL)
	    {
	      /* We are somewhere in the middle of EBB.	 */
	      get_live_on_other_edges (curr_bb, last_processed_bb,
				       &temp_bitmap);
	      to_process = &temp_bitmap;
	    }
	  last_processed_bb = curr_bb;
	  last_insn = get_last_insertion_point (curr_bb);
	  /* AFTER_P is set when LAST_INSN is neither a jump nor a
	     noreturn/sibling call.  */
	  after_p = (! JUMP_P (last_insn)
		     && (! CALL_P (last_insn)
			 || (find_reg_note (last_insn,
					   REG_NORETURN, NULL_RTX) == NULL_RTX
			     && ! SIBLING_CALL_P (last_insn))));
	  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
	    {
	      if ((int) j >= lra_constraint_new_regno_start)
		break;
	      if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
		{
		  if (j < FIRST_PSEUDO_REGISTER)
		    SET_HARD_REG_BIT (live_hard_regs, j);
		  else
		    add_to_hard_reg_set (&live_hard_regs,
					 PSEUDO_REGNO_MODE (j),
					 reg_renumber[j]);
		  setup_next_usage_insn (j, last_insn, reloads_num, after_p);
		}
	    }
	}
      /* Collect the source and destination regnos when the current
	 insn is a simple register-to-register move.  */
      src_regno = dst_regno = -1;
      if (NONDEBUG_INSN_P (curr_insn)
	  && (set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
	{
	  src_regno = REGNO (SET_SRC (set));
	  dst_regno = REGNO (SET_DEST (set));
	}
      update_reloads_num_p = true;
      if (src_regno < lra_constraint_new_regno_start
	  && src_regno >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[src_regno] < 0
	  && dst_regno >= lra_constraint_new_regno_start
	  && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
	{
	  /* 'reload_pseudo <- original_pseudo'.  */
	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
	    reloads_num++;
	  update_reloads_num_p = false;
	  succ_p = false;
	  if (usage_insns[src_regno].check == curr_usage_insns_check
	      && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
	    succ_p = inherit_reload_reg (false, src_regno, cl,
					 curr_insn, next_usage_insns);
	  if (succ_p)
	    change_p = true;
	  else
	    setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
	    IOR_HARD_REG_SET (potential_reload_hard_regs,
			      reg_class_contents[cl]);
	}
      else if (src_regno >= lra_constraint_new_regno_start
	       && dst_regno < lra_constraint_new_regno_start
	       && dst_regno >= FIRST_PSEUDO_REGISTER
	       && reg_renumber[dst_regno] < 0
	       && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
	       && usage_insns[dst_regno].check == curr_usage_insns_check
	       && (next_usage_insns
		   = usage_insns[dst_regno].insns) != NULL_RTX)
	{
	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
	    reloads_num++;
	  update_reloads_num_p = false;
	  /* 'original_pseudo <- reload_pseudo'.  */
	  if (! JUMP_P (curr_insn)
	      && inherit_reload_reg (true, dst_regno, cl,
				     curr_insn, next_usage_insns))
	    change_p = true;
	  /* Invalidate.  */
	  usage_insns[dst_regno].check = 0;
	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
	    IOR_HARD_REG_SET (potential_reload_hard_regs,
			      reg_class_contents[cl]);
	}
      else if (INSN_P (curr_insn))
	{
	  int iter;
	  int max_uid = get_max_uid ();

	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  to_inherit_num = 0;
	  /* Process insn definitions.	*/
	  for (iter = 0; iter < 2; iter++)
	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
		 reg != NULL;
		 reg = reg->next)
	      if (reg->type != OP_IN
		  && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
		{
		  if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
		      && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
		      && usage_insns[dst_regno].check == curr_usage_insns_check
		      && (next_usage_insns
			  = usage_insns[dst_regno].insns) != NULL_RTX)
		    {
		      struct lra_insn_reg *r;

		      for (r = curr_id->regs; r != NULL; r = r->next)
			if (r->type != OP_OUT && r->regno == dst_regno)
			  break;
		      /* Don't do inheritance if the pseudo is also
			 used in the insn.  */
		      if (r == NULL)
			/* We can not do inheritance right now
			   because the current insn reg info (chain
			   regs) can change after that.  */
			add_to_inherit (dst_regno, next_usage_insns);
		    }
		  /* We can not process one reg twice here because of
		     usage_insns invalidation.  */
		  if ((dst_regno < FIRST_PSEUDO_REGISTER
		       || reg_renumber[dst_regno] >= 0)
		      && ! reg->subreg_p && reg->type != OP_IN)
		    {
		      HARD_REG_SET s;

		      if (split_if_necessary (dst_regno, reg->biggest_mode,
					      potential_reload_hard_regs,
					      false, curr_insn, max_uid))
			change_p = true;
		      /* The defined hard regs stop being live here.  */
		      CLEAR_HARD_REG_SET (s);
		      if (dst_regno < FIRST_PSEUDO_REGISTER)
			add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
		      else
			add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
					     reg_renumber[dst_regno]);
		      AND_COMPL_HARD_REG_SET (live_hard_regs, s);
		    }
		  /* We should invalidate potential inheritance or
		     splitting for the current insn usages to the next
		     usage insns (see code below) as the output pseudo
		     prevents this.  */
		  if ((dst_regno >= FIRST_PSEUDO_REGISTER
		       && reg_renumber[dst_regno] < 0)
		      || (reg->type == OP_OUT && ! reg->subreg_p
			  && (dst_regno < FIRST_PSEUDO_REGISTER
			      || reg_renumber[dst_regno] >= 0)))
		    {
		      /* Invalidate and mark definitions.  */
		      if (dst_regno >= FIRST_PSEUDO_REGISTER)
			usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
		      else
			{
			  nregs = hard_regno_nregs[dst_regno][reg->biggest_mode];
			  for (i = 0; i < nregs; i++)
			    usage_insns[dst_regno + i].check
			      = -(int) INSN_UID (curr_insn);
			}
		    }
		}
	  /* Process clobbered call regs.  */
	  if (curr_id->arg_hard_regs != NULL)
	    for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	      if (dst_regno >= FIRST_PSEUDO_REGISTER)
		usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
		  = -(int) INSN_UID (curr_insn);
	  if (! JUMP_P (curr_insn))
	    for (i = 0; i < to_inherit_num; i++)
	      if (inherit_reload_reg (true, to_inherit[i].regno,
				      ALL_REGS, curr_insn,
				      to_inherit[i].insns))
	      change_p = true;
	  if (CALL_P (curr_insn))
	    {
	      rtx cheap, pat, dest;
	      rtx_insn *restore;
	      int regno, hard_regno;

	      calls_num++;
	      if ((cheap = find_reg_note (curr_insn,
					  REG_RETURNED, NULL_RTX)) != NULL_RTX
		  && ((cheap = XEXP (cheap, 0)), true)
		  && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
		  && (hard_regno = reg_renumber[regno]) >= 0
		  /* If there are pending saves/restores, the
		     optimization is not worth.	 */
		  && usage_insns[regno].calls_num == calls_num - 1
		  && TEST_HARD_REG_BIT (call_used_reg_set, hard_regno))
		{
		  /* Restore the pseudo from the call result as
		     REG_RETURNED note says that the pseudo value is
		     in the call result and the pseudo is an argument
		     of the call.  */
		  pat = PATTERN (curr_insn);
		  if (GET_CODE (pat) == PARALLEL)
		    pat = XVECEXP (pat, 0, 0);
		  dest = SET_DEST (pat);
		  /* For multiple return values dest is PARALLEL.
		     Currently we handle only single return value case.  */
		  if (REG_P (dest))
		    {
		      start_sequence ();
		      emit_move_insn (cheap, copy_rtx (dest));
		      restore = get_insns ();
		      end_sequence ();
		      lra_process_new_insns (curr_insn, NULL, restore,
					     "Inserting call parameter restore");
		      /* We don't need to save/restore of the pseudo from
			 this call.	 */
		      usage_insns[regno].calls_num = calls_num;
		      bitmap_set_bit (&check_only_regs, regno);
		    }
		}
	    }
	  to_inherit_num = 0;
	  /* Process insn usages.  */
	  for (iter = 0; iter < 2; iter++)
	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
		 reg != NULL;
		 reg = reg->next)
	      if ((reg->type != OP_OUT
		   || (reg->type == OP_OUT && reg->subreg_p))
		  && (src_regno = reg->regno) < lra_constraint_new_regno_start)
		{
		  if (src_regno >= FIRST_PSEUDO_REGISTER
		      && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
		    {
		      if (usage_insns[src_regno].check == curr_usage_insns_check
			  && (next_usage_insns
			      = usage_insns[src_regno].insns) != NULL_RTX
			  && NONDEBUG_INSN_P (curr_insn))
			add_to_inherit (src_regno, next_usage_insns);
		      else if (usage_insns[src_regno].check
			       != -(int) INSN_UID (curr_insn))
			/* Add usages but only if the reg is not set up
			   in the same insn.  */
			add_next_usage_insn (src_regno, curr_insn, reloads_num);
		    }
		  else if (src_regno < FIRST_PSEUDO_REGISTER
			   || reg_renumber[src_regno] >= 0)
		    {
		      bool before_p;
		      rtx use_insn = curr_insn;

		      before_p = (JUMP_P (curr_insn)
				  || (CALL_P (curr_insn) && reg->type == OP_IN));
		      if (NONDEBUG_INSN_P (curr_insn)
			  && (! JUMP_P (curr_insn) || reg->type == OP_IN)
			  && split_if_necessary (src_regno, reg->biggest_mode,
						 potential_reload_hard_regs,
						 before_p, curr_insn, max_uid))
			{
			  if (reg->subreg_p)
			    lra_risky_transformations_p = true;
			  change_p = true;
			  /* Invalidate. */
			  usage_insns[src_regno].check = 0;
			  if (before_p)
			    use_insn = PREV_INSN (curr_insn);
			}
		      if (NONDEBUG_INSN_P (curr_insn))
			{
			  if (src_regno < FIRST_PSEUDO_REGISTER)
			    add_to_hard_reg_set (&live_hard_regs,
						 reg->biggest_mode, src_regno);
			  else
			    add_to_hard_reg_set (&live_hard_regs,
						 PSEUDO_REGNO_MODE (src_regno),
						 reg_renumber[src_regno]);
			}
		      add_next_usage_insn (src_regno, use_insn, reloads_num);
		    }
		}
	  /* Process used call regs.  */
	  if (curr_id->arg_hard_regs != NULL)
	    for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	      if (src_regno < FIRST_PSEUDO_REGISTER)
		{
	           SET_HARD_REG_BIT (live_hard_regs, src_regno);
	           add_next_usage_insn (src_regno, curr_insn, reloads_num);
		}
	  for (i = 0; i < to_inherit_num; i++)
	    {
	      src_regno = to_inherit[i].regno;
	      if (inherit_reload_reg (false, src_regno, ALL_REGS,
				      curr_insn, to_inherit[i].insns))
		change_p = true;
	      else
		setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
	    }
	}
      if (update_reloads_num_p
	  && NONDEBUG_INSN_P (curr_insn)
          && (set = single_set (curr_insn)) != NULL_RTX)
	{
	  int regno = -1;
	  if ((REG_P (SET_DEST (set))
	       && (regno = REGNO (SET_DEST (set))) >= lra_constraint_new_regno_start
	       && reg_renumber[regno] < 0
	       && (cl = lra_get_allocno_class (regno)) != NO_REGS)
	      || (REG_P (SET_SRC (set))
	          && (regno = REGNO (SET_SRC (set))) >= lra_constraint_new_regno_start
	          && reg_renumber[regno] < 0
	          && (cl = lra_get_allocno_class (regno)) != NO_REGS))
	    {
	      if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
		reloads_num++;
	      if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
		IOR_HARD_REG_SET (potential_reload_hard_regs,
	                          reg_class_contents[cl]);
	    }
	}
      /* We reached the start of the current basic block.  */
      if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
	  || BLOCK_FOR_INSN (prev_insn) != curr_bb)
	{
	  /* We reached the beginning of the current block -- do
	     rest of splitting in the current BB.  */
	  to_process = df_get_live_in (curr_bb);
	  if (BLOCK_FOR_INSN (head) != curr_bb)
	    {
	      /* We are somewhere in the middle of EBB.	 */
	      get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
				       curr_bb, &temp_bitmap);
	      to_process = &temp_bitmap;
	    }
	  head_p = true;
	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
	    {
	      if ((int) j >= lra_constraint_new_regno_start)
		break;
	      if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
		  && usage_insns[j].check == curr_usage_insns_check
		  && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
		{
		  if (need_for_split_p (potential_reload_hard_regs, j))
		    {
		      if (lra_dump_file != NULL && head_p)
			{
			  fprintf (lra_dump_file,
				   "  ----------------------------------\n");
			  head_p = false;
			}
		      if (split_reg (false, j, bb_note (curr_bb),
				     next_usage_insns))
			change_p = true;
		    }
		  usage_insns[j].check = 0;
		}
	    }
	}
    }
  return change_p;
}
5702
5703/* This value affects EBB forming.  If probability of edge from EBB to
5704   a BB is not greater than the following value, we don't add the BB
5705   to EBB.  */
5706#define EBB_PROBABILITY_CUTOFF \
5707  ((REG_BR_PROB_BASE * LRA_INHERITANCE_EBB_PROBABILITY_CUTOFF) / 100)
5708
5709/* Current number of inheritance/split iteration.  */
5710int lra_inheritance_iter;
5711
5712/* Entry function for inheritance/split pass.  */
5713void
5714lra_inheritance (void)
5715{
5716  int i;
5717  basic_block bb, start_bb;
5718  edge e;
5719
5720  lra_inheritance_iter++;
5721  if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
5722    return;
5723  timevar_push (TV_LRA_INHERITANCE);
5724  if (lra_dump_file != NULL)
5725    fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
5726	     lra_inheritance_iter);
5727  curr_usage_insns_check = 0;
5728  usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
5729  for (i = 0; i < lra_constraint_new_regno_start; i++)
5730    usage_insns[i].check = 0;
5731  bitmap_initialize (&check_only_regs, &reg_obstack);
5732  bitmap_initialize (&live_regs, &reg_obstack);
5733  bitmap_initialize (&temp_bitmap, &reg_obstack);
5734  bitmap_initialize (&ebb_global_regs, &reg_obstack);
5735  FOR_EACH_BB_FN (bb, cfun)
5736    {
5737      start_bb = bb;
5738      if (lra_dump_file != NULL)
5739	fprintf (lra_dump_file, "EBB");
5740      /* Form a EBB starting with BB.  */
5741      bitmap_clear (&ebb_global_regs);
5742      bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
5743      for (;;)
5744	{
5745	  if (lra_dump_file != NULL)
5746	    fprintf (lra_dump_file, " %d", bb->index);
5747	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5748	      || LABEL_P (BB_HEAD (bb->next_bb)))
5749	    break;
5750	  e = find_fallthru_edge (bb->succs);
5751	  if (! e)
5752	    break;
5753	  if (e->probability < EBB_PROBABILITY_CUTOFF)
5754	    break;
5755	  bb = bb->next_bb;
5756	}
5757      bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
5758      if (lra_dump_file != NULL)
5759	fprintf (lra_dump_file, "\n");
5760      if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
5761	/* Remember that the EBB head and tail can change in
5762	   inherit_in_ebb.  */
5763	update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
5764    }
5765  bitmap_clear (&ebb_global_regs);
5766  bitmap_clear (&temp_bitmap);
5767  bitmap_clear (&live_regs);
5768  bitmap_clear (&check_only_regs);
5769  free (usage_insns);
5770
5771  timevar_pop (TV_LRA_INHERITANCE);
5772}
5773
5774
5775
5776/* This page contains code to undo failed inheritance/split
5777   transformations.  */
5778
5779/* Current number of iteration undoing inheritance/split.  */
5780int lra_undo_inheritance_iter;
5781
5782/* Fix BB live info LIVE after removing pseudos created on pass doing
5783   inheritance/split which are REMOVED_PSEUDOS.	 */
5784static void
5785fix_bb_live_info (bitmap live, bitmap removed_pseudos)
5786{
5787  unsigned int regno;
5788  bitmap_iterator bi;
5789
5790  EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
5791    if (bitmap_clear_bit (live, regno))
5792      bitmap_set_bit (live, lra_reg_info[regno].restore_regno);
5793}
5794
5795/* Return regno of the (subreg of) REG. Otherwise, return a negative
5796   number.  */
5797static int
5798get_regno (rtx reg)
5799{
5800  if (GET_CODE (reg) == SUBREG)
5801    reg = SUBREG_REG (reg);
5802  if (REG_P (reg))
5803    return REGNO (reg);
5804  return -1;
5805}
5806
5807/* Delete a move INSN with destination reg DREGNO and a previous
5808   clobber insn with the same regno.  The inheritance/split code can
5809   generate moves with preceding clobber and when we delete such moves
5810   we should delete the clobber insn too to keep the correct life
5811   info.  */
5812static void
5813delete_move_and_clobber (rtx_insn *insn, int dregno)
5814{
5815  rtx_insn *prev_insn = PREV_INSN (insn);
5816
5817  lra_set_insn_deleted (insn);
5818  lra_assert (dregno > 0);
5819  if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
5820      && GET_CODE (PATTERN (prev_insn)) == CLOBBER
5821      && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
5822    lra_set_insn_deleted (prev_insn);
5823}
5824
/* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
   return true if we did any change.  The undo transformations for
   inheritance look like
      i <- i2
      p <- i	  =>   p <- i2
   or removing
      p <- i, i <- p, and i <- i3
   where p is original pseudo from which inheritance pseudo i was
   created, i and i3 are removed inheritance pseudos, i2 is another
   not removed inheritance pseudo.  All split pseudos or other
   occurrences of removed inheritance pseudos are changed on the
   corresponding original pseudos.

   The function also schedules insns changed and created during
   inheritance/split pass for processing by the subsequent constraint
   pass.  */
static bool
remove_inheritance_pseudos (bitmap remove_pseudos)
{
  basic_block bb;
  int regno, sregno, prev_sregno, dregno, restore_regno;
  rtx set, prev_set;
  rtx_insn *prev_insn;
  bool change_p, done_p;

  change_p = ! bitmap_empty_p (remove_pseudos);
  /* We can not finish the function right away if CHANGE_P is true
     because we need to mark insns affected by previous
     inheritance/split pass for processing by the subsequent
     constraint pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
      fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
      FOR_BB_INSNS_REVERSE (bb, curr_insn)
	{
	  if (! INSN_P (curr_insn))
	    continue;
	  done_p = false;
	  sregno = dregno = -1;
	  if (change_p && NONDEBUG_INSN_P (curr_insn)
	      && (set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dregno = get_regno (SET_DEST (set));
	      sregno = get_regno (SET_SRC (set));
	    }

	  if (sregno >= 0 && dregno >= 0)
	    {
	      if ((bitmap_bit_p (remove_pseudos, sregno)
		   && (lra_reg_info[sregno].restore_regno == dregno
		       || (bitmap_bit_p (remove_pseudos, dregno)
			   && (lra_reg_info[sregno].restore_regno
			       == lra_reg_info[dregno].restore_regno))))
		  || (bitmap_bit_p (remove_pseudos, dregno)
		      && lra_reg_info[dregno].restore_regno == sregno))
		/* One of the following cases:
		     original <- removed inheritance pseudo
		     removed inherit pseudo <- another removed inherit pseudo
		     removed inherit pseudo <- original pseudo
		   Or
		     removed_split_pseudo <- original_reg
		     original_reg <- removed_split_pseudo */
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing %s:\n",
			       bitmap_bit_p (&lra_split_regs, sregno)
			       || bitmap_bit_p (&lra_split_regs, dregno)
			       ? "split" : "inheritance");
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
		{
		  /* Search the following pattern:
		       inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
		       original_pseudo <- inherit_or_split_pseudo1
		    where the 2nd insn is the current insn and
		    inherit_or_split_pseudo2 is not removed.  If it is found,
		    change the current insn onto:
		       original_pseudo <- inherit_or_split_pseudo2.  */
		  for (prev_insn = PREV_INSN (curr_insn);
		       prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
		       prev_insn = PREV_INSN (prev_insn))
		    ;
		  if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
		      && (prev_set = single_set (prev_insn)) != NULL_RTX
		      /* There should be no subregs in insn we are
			 searching because only the original reg might
			 be in subreg when we changed the mode of
			 load/store for splitting.  */
		      && REG_P (SET_DEST (prev_set))
		      && REG_P (SET_SRC (prev_set))
		      && (int) REGNO (SET_DEST (prev_set)) == sregno
		      && ((prev_sregno = REGNO (SET_SRC (prev_set)))
			  >= FIRST_PSEUDO_REGISTER)
		      /* As we consider chain of inheritance or
			 splitting described in above comment we should
			 check that sregno and prev_sregno were
			 inheritance/split pseudos created from the
			 same original regno.  */
		      && (lra_reg_info[sregno].restore_regno
			  == lra_reg_info[prev_sregno].restore_regno)
		      && ! bitmap_bit_p (remove_pseudos, prev_sregno))
		    {
		      lra_assert (GET_MODE (SET_SRC (prev_set))
				  == GET_MODE (regno_reg_rtx[sregno]));
		      if (GET_CODE (SET_SRC (set)) == SUBREG)
			SUBREG_REG (SET_SRC (set)) = SET_SRC (prev_set);
		      else
			SET_SRC (set) = SET_SRC (prev_set);
		      /* As we are finishing with processing the insn
			 here, check the destination too as it might be
			 an inheritance pseudo for another pseudo.  */
		      if (bitmap_bit_p (remove_pseudos, dregno)
			  && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
			  && (restore_regno
			      = lra_reg_info[dregno].restore_regno) >= 0)
			{
			  if (GET_CODE (SET_DEST (set)) == SUBREG)
			    SUBREG_REG (SET_DEST (set))
			      = regno_reg_rtx[restore_regno];
			  else
			    SET_DEST (set) = regno_reg_rtx[restore_regno];
			}
		      lra_push_insn_and_update_insn_regno_info (curr_insn);
		      lra_set_used_insn_alternative_by_uid
			(INSN_UID (curr_insn), -1);
		      done_p = true;
		      if (lra_dump_file != NULL)
			{
			  fprintf (lra_dump_file, "    Change reload insn:\n");
			  dump_insn_slim (lra_dump_file, curr_insn);
			}
		    }
		}
	    }
	  /* The insn was neither deleted nor transformed above:
	     substitute any removed pseudos it mentions by their
	     original registers.  */
	  if (! done_p)
	    {
	      struct lra_insn_reg *reg;
	      bool restored_regs_p = false;
	      bool kept_regs_p = false;

	      curr_id = lra_get_insn_recog_data (curr_insn);
	      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
		{
		  regno = reg->regno;
		  restore_regno = lra_reg_info[regno].restore_regno;
		  if (restore_regno >= 0)
		    {
		      if (change_p && bitmap_bit_p (remove_pseudos, regno))
			{
			  lra_substitute_pseudo_within_insn
			    (curr_insn, regno, regno_reg_rtx[restore_regno],
			     false);
			  restored_regs_p = true;
			}
		      else
			kept_regs_p = true;
		    }
		}
	      if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
		{
		  /* The instruction has changed since the previous
		     constraints pass.  */
		  lra_push_insn_and_update_insn_regno_info (curr_insn);
		  lra_set_used_insn_alternative_by_uid
		    (INSN_UID (curr_insn), -1);
		}
	      else if (restored_regs_p)
		/* The instruction has been restored to the form that
		   it had during the previous constraints pass.  */
		lra_update_insn_regno_info (curr_insn);
	      if (restored_regs_p && lra_dump_file != NULL)
		{
		  fprintf (lra_dump_file, "   Insn after restoring regs:\n");
		  dump_insn_slim (lra_dump_file, curr_insn);
		}
	    }
	}
    }
  return change_p;
}
6012
/* If optional reload pseudos failed to get a hard register or were
   not inherited, it is better to remove optional reloads.  We do this
   transformation after undoing inheritance to figure out necessity to
   remove optional reloads easier.  Return true if we do any
   change.  */
static bool
undo_optional_reloads (void)
{
  bool change_p, keep_p;
  unsigned int regno, uid;
  bitmap_iterator bi, bi2;
  rtx_insn *insn;
  rtx set, src, dest;
  bitmap_head removed_optional_reload_pseudos, insn_bitmap;

  /* Start from all optional reload pseudos and drop from the set the
     ones which are worth keeping.  */
  bitmap_initialize (&removed_optional_reload_pseudos, &reg_obstack);
  bitmap_copy (&removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    {
      keep_p = false;
      /* Keep optional reloads from previous subpasses.  */
      if (lra_reg_info[regno].restore_regno < 0
	  /* If the original pseudo changed its allocation, just
	     removing the optional pseudo is dangerous as the original
	     pseudo will have longer live range.  */
	  || reg_renumber[lra_reg_info[regno].restore_regno] >= 0)
	keep_p = true;
      else if (reg_renumber[regno] >= 0)
	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
	  {
	    insn = lra_insn_recog_data[uid]->insn;
	    if ((set = single_set (insn)) == NULL_RTX)
	      continue;
	    src = SET_SRC (set);
	    dest = SET_DEST (set);
	    if (! REG_P (src) || ! REG_P (dest))
	      continue;
	    if (REGNO (dest) == regno
		/* Ignore insn for optional reloads itself.  */
		&& lra_reg_info[regno].restore_regno != (int) REGNO (src)
		/* Check only inheritance on last inheritance pass.  */
		&& (int) REGNO (src) >= new_regno_start
		/* Check that the optional reload was inherited.  */
		&& bitmap_bit_p (&lra_inheritance_pseudos, REGNO (src)))
	      {
		keep_p = true;
		break;
	      }
	  }
      if (keep_p)
	{
	  bitmap_clear_bit (&removed_optional_reload_pseudos, regno);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
	}
    }
  change_p = ! bitmap_empty_p (&removed_optional_reload_pseudos);
  bitmap_initialize (&insn_bitmap, &reg_obstack);
  /* Now undo the remaining optional reloads: delete their moves and
     restore the original registers in the insns referencing them.  */
  EXECUTE_IF_SET_IN_BITMAP (&removed_optional_reload_pseudos, 0, regno, bi)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
      bitmap_copy (&insn_bitmap, &lra_reg_info[regno].insn_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (&insn_bitmap, 0, uid, bi2)
	{
	  insn = lra_insn_recog_data[uid]->insn;
	  if ((set = single_set (insn)) != NULL_RTX)
	    {
	      src = SET_SRC (set);
	      dest = SET_DEST (set);
	      if (REG_P (src) && REG_P (dest)
		  && ((REGNO (src) == regno
		       && (lra_reg_info[regno].restore_regno
			   == (int) REGNO (dest)))
		      || (REGNO (dest) == regno
			  && (lra_reg_info[regno].restore_regno
			      == (int) REGNO (src)))))
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "  Deleting move %u\n",
			       INSN_UID (insn));
		      dump_insn_slim (lra_dump_file, insn);
		    }
		  delete_move_and_clobber (insn, REGNO (dest));
		  continue;
		}
	      /* We should not worry about generating memory-memory
		 moves here as if the corresponding inheritance did
		 not work (inheritance pseudo did not get a hard reg),
		 we remove the inheritance pseudo and the optional
		 reload.  */
	    }
	  lra_substitute_pseudo_within_insn
	    (insn, regno, regno_reg_rtx[lra_reg_info[regno].restore_regno],
	     false);
	  lra_update_insn_regno_info (insn);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "  Restoring original insn:\n");
	      dump_insn_slim (lra_dump_file, insn);
	    }
	}
    }
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_regno = -1;
  bitmap_clear (&insn_bitmap);
  bitmap_clear (&removed_optional_reload_pseudos);
  return change_p;
}
6125
6126/* Entry function for undoing inheritance/split transformation.	 Return true
6127   if we did any RTL change in this pass.  */
6128bool
6129lra_undo_inheritance (void)
6130{
6131  unsigned int regno;
6132  int restore_regno, hard_regno;
6133  int n_all_inherit, n_inherit, n_all_split, n_split;
6134  bitmap_head remove_pseudos;
6135  bitmap_iterator bi;
6136  bool change_p;
6137
6138  lra_undo_inheritance_iter++;
6139  if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
6140    return false;
6141  if (lra_dump_file != NULL)
6142    fprintf (lra_dump_file,
6143	     "\n********** Undoing inheritance #%d: **********\n\n",
6144	     lra_undo_inheritance_iter);
6145  bitmap_initialize (&remove_pseudos, &reg_obstack);
6146  n_inherit = n_all_inherit = 0;
6147  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
6148    if (lra_reg_info[regno].restore_regno >= 0)
6149      {
6150	n_all_inherit++;
6151	if (reg_renumber[regno] < 0
6152	    /* If the original pseudo changed its allocation, just
6153	       removing inheritance is dangerous as for changing
6154	       allocation we used shorter live-ranges.  */
6155	    && reg_renumber[lra_reg_info[regno].restore_regno] < 0)
6156	  bitmap_set_bit (&remove_pseudos, regno);
6157	else
6158	  n_inherit++;
6159      }
6160  if (lra_dump_file != NULL && n_all_inherit != 0)
6161    fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
6162	     n_inherit, n_all_inherit,
6163	     (double) n_inherit / n_all_inherit * 100);
6164  n_split = n_all_split = 0;
6165  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
6166    if ((restore_regno = lra_reg_info[regno].restore_regno) >= 0)
6167      {
6168	n_all_split++;
6169	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
6170		      ? reg_renumber[restore_regno] : restore_regno);
6171	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
6172	  bitmap_set_bit (&remove_pseudos, regno);
6173	else
6174	  {
6175	    n_split++;
6176	    if (lra_dump_file != NULL)
6177	      fprintf (lra_dump_file, "	     Keep split r%d (orig=r%d)\n",
6178		       regno, restore_regno);
6179	  }
6180      }
6181  if (lra_dump_file != NULL && n_all_split != 0)
6182    fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
6183	     n_split, n_all_split,
6184	     (double) n_split / n_all_split * 100);
6185  change_p = remove_inheritance_pseudos (&remove_pseudos);
6186  bitmap_clear (&remove_pseudos);
6187  /* Clear restore_regnos.  */
6188  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
6189    lra_reg_info[regno].restore_regno = -1;
6190  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
6191    lra_reg_info[regno].restore_regno = -1;
6192  change_p = undo_optional_reloads () || change_p;
6193  return change_p;
6194}
6195