/* IRA processing allocno lives to build allocno live ranges.
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "regs.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "flags.h"
#include "except.h"
#include "hard-reg-set.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "params.h"
#include "df.h"
#include "sbitmap.h"
#include "sparseset.h"
#include "ira-int.h"

/* The code in this file is similar to the code in the old global
   allocator, but it works on the allocno basis and creates live
   ranges instead of pseudo-register conflicts.  */

/* Program points are enumerated by numbers from the range
   0..IRA_MAX_POINT-1.  There are approximately two times more program
   points than insns.  Program points are places in the program where
   liveness info can change.  In the most general case (there are more
   complicated cases too) some program points correspond to places
   where an input operand dies and others correspond to places where
   output operands are born.  */
int ira_max_point;

/* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
   live ranges with given start/finish point.  */
live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;
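
/* As an illustration (the exact numbering is target- and
   insn-dependent): for a single insn

       r100 = r101 + r102

   there is typically one program point at which the inputs r101 and
   r102 may die and another at which the output r100 is born.  A value
   flowing from the output of one insn to an input of a later insn is
   then described by a live range [birth point..death point] attached
   to the object(s) of its allocno; ira_start_point_ranges and
   ira_finish_point_ranges chain all ranges that start or finish at a
   given point.  */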

/* Number of the current program point.  */
static int curr_point;

/* Point where register pressure excess started or -1 if there is no
   register pressure excess.  Excess pressure for a register class at
   some point means that there are more allocnos of a given register
   class living at the point than the number of hard registers of the
   class available for allocation.  It is defined only for pressure
   classes.  */
static int high_pressure_start_point[N_REG_CLASSES];

/* Objects live at the current point in the scan.  */
static sparseset objects_live;

/* A temporary bitmap used in functions that wish to avoid visiting an allocno
   multiple times.  */
static sparseset allocnos_processed;

/* Set of hard regs (except eliminable ones) currently live.  */
static HARD_REG_SET hard_regs_live;

/* The loop tree node corresponding to the current basic block.  */
static ira_loop_tree_node_t curr_bb_node;

/* The number of the last processed call.  */
static int last_call_num;
/* The number of the last call at which a given allocno was saved.  */
static int *allocno_saved_at_call;

/* The value of get_preferred_alternatives for the current instruction,
   supplemental to recog_data.  */
static alternative_mask preferred_alternatives;

/* Record the birth of hard register REGNO, updating hard_regs_live and
   hard reg conflict information for living allocnos.  */
static void
make_hard_regno_born (int regno)
{
  unsigned int i;

  SET_HARD_REG_BIT (hard_regs_live, regno);
  EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
    {
      ira_object_t obj = ira_object_id_map[i];

      SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno);
      SET_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), regno);
    }
}

/* Process the death of hard register REGNO.  This updates
   hard_regs_live.  */
static void
make_hard_regno_dead (int regno)
{
  CLEAR_HARD_REG_BIT (hard_regs_live, regno);
}

/* Record the birth of object OBJ.  Set a bit for it in objects_live,
   start a new live range for it if necessary and update hard register
   conflicts.  */
static void
make_object_born (ira_object_t obj)
{
  live_range_t lr = OBJECT_LIVE_RANGES (obj);

  sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
  IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), hard_regs_live);
  IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), hard_regs_live);

  if (lr == NULL
      || (lr->finish != curr_point && lr->finish + 1 != curr_point))
    ira_add_live_range_to_object (obj, curr_point, -1);
}
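
/* Note: the guard in make_object_born avoids chaining a new range
   that would merely abut the most recent one.  If the top range
   already finishes at curr_point or at curr_point - 1, the
   forthcoming call to make_object_dead will extend that range by
   moving its finish forward instead of creating an adjacent range.  */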

/* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for the allocno
   associated with object OBJ.  */
static void
update_allocno_pressure_excess_length (ira_object_t obj)
{
  ira_allocno_t a = OBJECT_ALLOCNO (obj);
  int start, i;
  enum reg_class aclass, pclass, cl;
  live_range_t p;

  aclass = ALLOCNO_CLASS (a);
  pclass = ira_pressure_class_translate[aclass];
  for (i = 0;
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
       i++)
    {
      if (! ira_reg_pressure_class_p[cl])
	continue;
      if (high_pressure_start_point[cl] < 0)
	continue;
      p = OBJECT_LIVE_RANGES (obj);
      ira_assert (p != NULL);
      start = (high_pressure_start_point[cl] > p->start
	       ? high_pressure_start_point[cl] : p->start);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
    }
}
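
/* As a worked illustration: if pressure for class CL first exceeded
   the number of available hard registers at point 20
   (high_pressure_start_point[CL] == 20), the object's current range
   started at point 17, and curr_point is 25, then the excess interval
   charged to the allocno is max (20, 17) .. 25, i.e. 25 - 20 + 1 = 6
   program points.  */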

/* Process the death of object OBJ, which is associated with allocno
   A.  This finishes the current live range for it.  */
static void
make_object_dead (ira_object_t obj)
{
  live_range_t lr;

  sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (obj));
  lr = OBJECT_LIVE_RANGES (obj);
  ira_assert (lr != NULL);
  lr->finish = curr_point;
  update_allocno_pressure_excess_length (obj);
}

/* The current register pressures for each pressure class for the current
   basic block.  */
static int curr_reg_pressure[N_REG_CLASSES];

/* Record that register pressure for PCLASS increased by N registers.
   Update the current register pressure, maximal register pressure for
   the current BB and the start point of the register pressure
   excess.  */
static void
inc_register_pressure (enum reg_class pclass, int n)
{
  int i;
  enum reg_class cl;

  for (i = 0;
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
       i++)
    {
      if (! ira_reg_pressure_class_p[cl])
	continue;
      curr_reg_pressure[cl] += n;
      if (high_pressure_start_point[cl] < 0
	  && (curr_reg_pressure[cl] > ira_class_hard_regs_num[cl]))
	high_pressure_start_point[cl] = curr_point;
      if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
	curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
    }
}

/* Record that register pressure for PCLASS has decreased by NREGS
   registers; update current register pressure, start point of the
   register pressure excess, and register pressure excess length for
   living allocnos.  */

static void
dec_register_pressure (enum reg_class pclass, int nregs)
{
  int i;
  unsigned int j;
  enum reg_class cl;
  bool set_p = false;

  for (i = 0;
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
       i++)
    {
      if (! ira_reg_pressure_class_p[cl])
	continue;
      curr_reg_pressure[cl] -= nregs;
      ira_assert (curr_reg_pressure[cl] >= 0);
      if (high_pressure_start_point[cl] >= 0
	  && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
	set_p = true;
    }
  if (set_p)
    {
      EXECUTE_IF_SET_IN_SPARSESET (objects_live, j)
	update_allocno_pressure_excess_length (ira_object_id_map[j]);
      for (i = 0;
	   (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
	   i++)
	{
	  if (! ira_reg_pressure_class_p[cl])
	    continue;
	  if (high_pressure_start_point[cl] >= 0
	      && curr_reg_pressure[cl] <= ira_class_hard_regs_num[cl])
	    high_pressure_start_point[cl] = -1;
	}
    }
}
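
/* Taken together, inc_register_pressure and dec_register_pressure
   maintain, per pressure class, the point at which pressure first
   exceeded the number of allocatable hard registers.  For example,
   with 3 hard registers in a class, births that push pressure to 4 at
   point 8 record 8 as the start of the excess; a death at point 12
   that brings pressure back to 3 first charges 12 - 8 + 1 = 5 excess
   points to each still-live allocno (clipped to each object's own
   range start) and then clears the start point.  */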

/* Determine from the objects_live bitmap whether REGNO is currently live,
   and occupies only one object.  Return false if we have no information.  */
static bool
pseudo_regno_single_word_and_live_p (int regno)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  ira_object_t obj;

  if (a == NULL)
    return false;
  if (ALLOCNO_NUM_OBJECTS (a) > 1)
    return false;

  obj = ALLOCNO_OBJECT (a, 0);

  return sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj));
}

/* Mark the pseudo register REGNO as live.  Update all information about
   live ranges and register pressure.  */
static void
mark_pseudo_regno_live (int regno)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  enum reg_class pclass;
  int i, n, nregs;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
  if (n > 1)
    {
      /* We track every subobject separately.  */
      gcc_assert (nregs == n);
      nregs = 1;
    }

  for (i = 0; i < n; i++)
    {
      ira_object_t obj = ALLOCNO_OBJECT (a, i);

      if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
	continue;

      inc_register_pressure (pclass, nregs);
      make_object_born (obj);
    }
}
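
/* An allocno whose mode needs several hard registers of its class may
   be tracked as one object per word (ALLOCNO_NUM_OBJECTS > 1); e.g. a
   DImode pseudo on a 32-bit target is typically represented by two
   word-sized objects.  Each such object then contributes one register
   to the pressure count, which is why nregs is forced to 1 above and
   in mark_pseudo_regno_dead below.  */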

/* Like mark_pseudo_regno_live, but try to only mark one subword of
   the pseudo as live.  SUBWORD indicates which; a value of 0
   indicates the low part.  */
static void
mark_pseudo_regno_subword_live (int regno, int subword)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  int n;
  enum reg_class pclass;
  ira_object_t obj;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  if (n == 1)
    {
      mark_pseudo_regno_live (regno);
      return;
    }

  pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  gcc_assert
    (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
  obj = ALLOCNO_OBJECT (a, subword);

  if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
    return;

  inc_register_pressure (pclass, 1);
  make_object_born (obj);
}

/* Mark the hard register REG as live.  Store a 1 in hard_regs_live
   for this register and for as many consecutive hard registers as it
   actually needs.  */
static void
mark_hard_reg_live (rtx reg)
{
  int regno = REGNO (reg);

  if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class aclass, pclass;

      while (regno < last)
	{
	  if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
	      && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
	    {
	      aclass = ira_hard_regno_allocno_class[regno];
	      pclass = ira_pressure_class_translate[aclass];
	      inc_register_pressure (pclass, 1);
	      make_hard_regno_born (regno);
	    }
	  regno++;
	}
    }
}

/* Mark a pseudo, or one of its subwords, as live.  REGNO is the pseudo's
   register number; ORIG_REG is the access in the insn, which may be a
   subreg.  */
static void
mark_pseudo_reg_live (rtx orig_reg, unsigned regno)
{
  if (df_read_modify_subreg_p (orig_reg))
    {
      mark_pseudo_regno_subword_live (regno,
				      subreg_lowpart_p (orig_reg) ? 0 : 1);
    }
  else
    mark_pseudo_regno_live (regno);
}

/* Mark the register referenced by use or def REF as live.  */
static void
mark_ref_live (df_ref ref)
{
  rtx reg = DF_REF_REG (ref);
  rtx orig_reg = reg;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    mark_pseudo_reg_live (orig_reg, REGNO (reg));
  else
    mark_hard_reg_live (reg);
}

/* Mark the pseudo register REGNO as dead.  Update all information about
   live ranges and register pressure.  */
static void
mark_pseudo_regno_dead (int regno)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  int n, i, nregs;
  enum reg_class cl;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
  if (n > 1)
    {
      /* We track every subobject separately.  */
      gcc_assert (nregs == n);
      nregs = 1;
    }
  for (i = 0; i < n; i++)
    {
      ira_object_t obj = ALLOCNO_OBJECT (a, i);
      if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
	continue;

      dec_register_pressure (cl, nregs);
      make_object_dead (obj);
    }
}

/* Like mark_pseudo_regno_dead, but called when we know that only part of the
   register dies.  SUBWORD indicates which; a value of 0 indicates the low part.  */
static void
mark_pseudo_regno_subword_dead (int regno, int subword)
{
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
  int n;
  enum reg_class cl;
  ira_object_t obj;

  if (a == NULL)
    return;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;

  n = ALLOCNO_NUM_OBJECTS (a);
  if (n == 1)
    /* The allocno as a whole doesn't die in this case.  */
    return;

  cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
  gcc_assert
    (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);

  obj = ALLOCNO_OBJECT (a, subword);
  if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
    return;

  dec_register_pressure (cl, 1);
  make_object_dead (obj);
}

/* Mark the hard register REG as dead.  Store a 0 in hard_regs_live for the
   register.  */
static void
mark_hard_reg_dead (rtx reg)
{
  int regno = REGNO (reg);

  if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class aclass, pclass;

      while (regno < last)
	{
	  if (TEST_HARD_REG_BIT (hard_regs_live, regno))
	    {
	      aclass = ira_hard_regno_allocno_class[regno];
	      pclass = ira_pressure_class_translate[aclass];
	      dec_register_pressure (pclass, 1);
	      make_hard_regno_dead (regno);
	    }
	  regno++;
	}
    }
}

/* Mark a pseudo, or one of its subwords, as dead.  REGNO is the pseudo's
   register number; ORIG_REG is the access in the insn, which may be a
   subreg.  */
static void
mark_pseudo_reg_dead (rtx orig_reg, unsigned regno)
{
  if (df_read_modify_subreg_p (orig_reg))
    {
      mark_pseudo_regno_subword_dead (regno,
				      subreg_lowpart_p (orig_reg) ? 0 : 1);
    }
  else
    mark_pseudo_regno_dead (regno);
}

/* Mark the register referenced by definition DEF as dead, if the
   definition is a total one.  */
static void
mark_ref_dead (df_ref def)
{
  rtx reg = DF_REF_REG (def);
  rtx orig_reg = reg;

  if (DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
    return;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
      && (GET_CODE (orig_reg) != SUBREG
	  || REGNO (reg) < FIRST_PSEUDO_REGISTER
	  || !df_read_modify_subreg_p (orig_reg)))
    return;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    mark_pseudo_reg_dead (orig_reg, REGNO (reg));
  else
    mark_hard_reg_dead (reg);
}
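
/* For instance, a conditional definition such as one inside a
   (cond_exec ...) may leave the previous value of the register
   intact, and a partial definition (other than a read-modify-write
   subreg of a pseudo, which is handled through the subword path) does
   not kill the whole register either; in both cases no death is
   recorded above.  */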

/* If REG is a pseudo or a subreg of it, and the class of its allocno
   intersects CL, make a conflict with pseudo DREG.  ORIG_DREG is the
   rtx actually accessed; it may be identical to DREG or a subreg of it.
   Advance the current program point before making the conflict if
   ADVANCE_P.  Return TRUE if we will need to advance the current
   program point.  */
static bool
make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, rtx orig_dreg,
		      bool advance_p)
{
  rtx orig_reg = reg;
  ira_allocno_t a;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
    return advance_p;

  a = ira_curr_regno_allocno_map[REGNO (reg)];
  if (! reg_classes_intersect_p (cl, ALLOCNO_CLASS (a)))
    return advance_p;

  if (advance_p)
    curr_point++;

  mark_pseudo_reg_live (orig_reg, REGNO (reg));
  mark_pseudo_reg_live (orig_dreg, REGNO (dreg));
  mark_pseudo_reg_dead (orig_reg, REGNO (reg));
  mark_pseudo_reg_dead (orig_dreg, REGNO (dreg));

  return false;
}

/* Check for and, if necessary, make conflicts for pseudo DREG of class
   DEF_CL of the current insn with input operand USE of class USE_CL.
   ORIG_DREG is the rtx actually accessed; it may be identical to
   DREG or a subreg of it.  Advance the current program point before
   making the conflict if ADVANCE_P.  Return TRUE if we will need to
   advance the current program point.  */
static bool
check_and_make_def_use_conflict (rtx dreg, rtx orig_dreg,
				 enum reg_class def_cl, int use,
				 enum reg_class use_cl, bool advance_p)
{
  if (! reg_classes_intersect_p (def_cl, use_cl))
    return advance_p;

  advance_p = make_pseudo_conflict (recog_data.operand[use],
				    use_cl, dreg, orig_dreg, advance_p);

  /* Reload may end up swapping commutative operands, so you
     have to take both orderings into account.  The
     constraints for the two operands can be completely
     different.  (Indeed, if the constraints for the two
     operands are the same for all alternatives, there's no
     point marking them as commutative.)  */
  if (use < recog_data.n_operands - 1
      && recog_data.constraints[use][0] == '%')
    advance_p
      = make_pseudo_conflict (recog_data.operand[use + 1],
			      use_cl, dreg, orig_dreg, advance_p);
  if (use >= 1
      && recog_data.constraints[use - 1][0] == '%')
    advance_p
      = make_pseudo_conflict (recog_data.operand[use - 1],
			      use_cl, dreg, orig_dreg, advance_p);
  return advance_p;
}
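
/* As an example of the commutativity handling above: if operand 1 of
   an insn has the constraint string "%r" and operand 2 has "r", then
   operands 1 and 2 may be swapped by reload, so a conflict recorded
   against operand 1 is also recorded against operand 2 (and vice
   versa), even though only one ordering appears in the insn as
   written.  */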

/* Check for and, if necessary, make conflicts for definition DEF of
   class DEF_CL of the current insn with input operands.  Process only
   constraints of alternative ALT.  */
static void
check_and_make_def_conflict (int alt, int def, enum reg_class def_cl)
{
  int use, use_match;
  ira_allocno_t a;
  enum reg_class use_cl, acl;
  bool advance_p;
  rtx dreg = recog_data.operand[def];
  rtx orig_dreg = dreg;

  if (def_cl == NO_REGS)
    return;

  if (GET_CODE (dreg) == SUBREG)
    dreg = SUBREG_REG (dreg);

  if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER)
    return;

  a = ira_curr_regno_allocno_map[REGNO (dreg)];
  acl = ALLOCNO_CLASS (a);
  if (! reg_classes_intersect_p (acl, def_cl))
    return;

  advance_p = true;

  int n_operands = recog_data.n_operands;
  const operand_alternative *op_alt = &recog_op_alt[alt * n_operands];
  for (use = 0; use < n_operands; use++)
    {
      int alt1;

      if (use == def || recog_data.operand_type[use] == OP_OUT)
	continue;

      if (op_alt[use].anything_ok)
	use_cl = ALL_REGS;
      else
	use_cl = op_alt[use].cl;

      /* If there's any alternative that allows USE to match DEF, do not
	 record a conflict.  If that causes us to create an invalid
	 instruction due to the earlyclobber, reload must fix it up.  */
      for (alt1 = 0; alt1 < recog_data.n_alternatives; alt1++)
	{
	  if (!TEST_BIT (preferred_alternatives, alt1))
	    continue;
	  const operand_alternative *op_alt1
	    = &recog_op_alt[alt1 * n_operands];
	  if (op_alt1[use].matches == def
	      || (use < n_operands - 1
		  && recog_data.constraints[use][0] == '%'
		  && op_alt1[use + 1].matches == def)
	      || (use >= 1
		  && recog_data.constraints[use - 1][0] == '%'
		  && op_alt1[use - 1].matches == def))
	    break;
	}

      if (alt1 < recog_data.n_alternatives)
	continue;

      advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
						   use, use_cl, advance_p);

      if ((use_match = op_alt[use].matches) >= 0)
	{
	  if (use_match == def)
	    continue;

	  if (op_alt[use_match].anything_ok)
	    use_cl = ALL_REGS;
	  else
	    use_cl = op_alt[use_match].cl;
	  advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
						       use, use_cl, advance_p);
	}
    }
}

/* Make conflicts of early clobber pseudo registers of the current
   insn with its inputs.  Avoid introducing unnecessary conflicts by
   checking classes of the constraints and pseudos because otherwise
   significant code degradation is possible for some targets.  */
static void
make_early_clobber_and_input_conflicts (void)
{
  int alt;
  int def, def_match;
  enum reg_class def_cl;

  int n_alternatives = recog_data.n_alternatives;
  int n_operands = recog_data.n_operands;
  const operand_alternative *op_alt = recog_op_alt;
  for (alt = 0; alt < n_alternatives; alt++, op_alt += n_operands)
    if (TEST_BIT (preferred_alternatives, alt))
      for (def = 0; def < n_operands; def++)
	{
	  def_cl = NO_REGS;
	  if (op_alt[def].earlyclobber)
	    {
	      if (op_alt[def].anything_ok)
		def_cl = ALL_REGS;
	      else
		def_cl = op_alt[def].cl;
	      check_and_make_def_conflict (alt, def, def_cl);
	    }
	  if ((def_match = op_alt[def].matches) >= 0
	      && (op_alt[def_match].earlyclobber
		  || op_alt[def].earlyclobber))
	    {
	      if (op_alt[def_match].anything_ok)
		def_cl = ALL_REGS;
	      else
		def_cl = op_alt[def_match].cl;
	      check_and_make_def_conflict (alt, def, def_cl);
	    }
	}
}
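
/* As an illustration (constraint syntax only, not a real machine
   description): for an alternative whose output operand 0 uses "=&r"
   and whose inputs use "r", the output may be written before the
   inputs are consumed, so any input pseudo whose class intersects the
   output's class must not share a hard register with the output.  The
   walk above records exactly those conflicts, including the case
   where an operand is tied to an earlyclobber operand via a matching
   constraint such as "0".  */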

/* Mark early clobber hard registers of the current INSN as live (if
   LIVE_P) or dead.  Return true if there are such registers.  */
static bool
mark_hard_reg_early_clobbers (rtx_insn *insn, bool live_p)
{
  df_ref def;
  bool set_p = false;

  FOR_EACH_INSN_DEF (def, insn)
    if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
      {
	rtx dreg = DF_REF_REG (def);

	if (GET_CODE (dreg) == SUBREG)
	  dreg = SUBREG_REG (dreg);
	if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
	  continue;

	/* Hard register clobbers are believed to be early clobber
	   because there is no way to say that non-operand hard
	   register clobbers are not early ones.  */
	if (live_p)
	  mark_ref_live (def);
	else
	  mark_ref_dead (def);
	set_p = true;
      }

  return set_p;
}

/* Check that CONSTRAINTS permits the use of only one hard register.
   If so, return the class of that hard register.  Otherwise return
   NO_REGS.  */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int c;
  enum reg_class cl, next_cl;
  enum constraint_num cn;

  cl = NO_REGS;
  alternative_mask preferred = preferred_alternatives;
  for (; (c = *constraints); constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      preferred &= ~ALTERNATIVE_BIT (0);
    else if (c == ',')
      preferred >>= 1;
    else if (preferred & 1)
      switch (c)
	{
	case 'g':
	  return NO_REGS;

	default:
	  /* ??? Is this the best way to handle memory constraints?  */
	  cn = lookup_constraint (constraints);
	  if (insn_extra_memory_constraint (cn)
	      || insn_extra_address_constraint (cn))
	    return NO_REGS;
	  if (constraint_satisfied_p (op, cn)
	      || (equiv_const != NULL_RTX
		  && CONSTANT_P (equiv_const)
		  && constraint_satisfied_p (equiv_const, cn)))
	    return NO_REGS;
	  next_cl = reg_class_for_constraint (cn);
	  if (next_cl == NO_REGS)
	    break;
	  if (cl == NO_REGS
	      ? ira_class_singleton[next_cl][GET_MODE (op)] < 0
	      : (ira_class_singleton[cl][GET_MODE (op)]
		 != ira_class_singleton[next_cl][GET_MODE (op)]))
	    return NO_REGS;
	  cl = next_cl;
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  next_cl
	    = single_reg_class (recog_data.constraints[c - '0'],
				recog_data.operand[c - '0'], NULL_RTX);
	  if (cl == NO_REGS
	      ? ira_class_singleton[next_cl][GET_MODE (op)] < 0
	      : (ira_class_singleton[cl][GET_MODE (op)]
		 != ira_class_singleton[next_cl][GET_MODE (op)]))
	    return NO_REGS;
	  cl = next_cl;
	  break;
	}
  return cl;
}
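
/* For example, on a target where the constraint letter "a" names a
   register class containing a single allocatable hard register for
   the operand's mode, a constraint string consisting only of "a" in
   every preferred alternative makes this function return that class;
   a string such as "r,m", which allows any general register or
   memory, makes it return NO_REGS.  */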

/* Check whether operand OP_NUM of the current insn can use only one
   hard register.  If so, return the class of that hard register.
   Otherwise return NO_REGS.  */
static enum reg_class
single_reg_operand_class (int op_num)
{
  if (op_num < 0 || recog_data.n_alternatives == 0)
    return NO_REGS;
  return single_reg_class (recog_data.constraints[op_num],
			   recog_data.operand[op_num], NULL_RTX);
}

/* Set up the hard register set *SET to the hard registers which might
   be used by insn reloads because the constraints are too strict.  */
void
ira_implicitly_set_insn_hard_regs (HARD_REG_SET *set,
				   alternative_mask preferred)
{
  int i, c, regno = 0;
  enum reg_class cl;
  rtx op;
  machine_mode mode;

  CLEAR_HARD_REG_SET (*set);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      op = recog_data.operand[i];

      if (GET_CODE (op) == SUBREG)
	op = SUBREG_REG (op);

      if (GET_CODE (op) == SCRATCH
	  || (REG_P (op) && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER))
	{
	  const char *p = recog_data.constraints[i];

	  mode = (GET_CODE (op) == SCRATCH
		  ? GET_MODE (op) : PSEUDO_REGNO_MODE (regno));
	  cl = NO_REGS;
	  for (; (c = *p); p += CONSTRAINT_LEN (c, p))
	    if (c == '#')
	      preferred &= ~ALTERNATIVE_BIT (0);
	    else if (c == ',')
	      preferred >>= 1;
	    else if (preferred & 1)
	      {
		cl = reg_class_for_constraint (lookup_constraint (p));
		if (cl != NO_REGS)
		  {
		    /* There is no register pressure problem if all of the
		       regs in this class are fixed.  */
		    int regno = ira_class_singleton[cl][mode];
		    if (regno >= 0)
		      add_to_hard_reg_set (set, mode, regno);
		  }
	      }
	}
    }
}

/* Process input operands, if IN_P, or output operands otherwise, of
   the current insn with FREQ to find an allocno that can use only one
   hard register, and make the other currently living allocnos
   conflict with that hard register.  */
static void
process_single_reg_class_operands (bool in_p, int freq)
{
  int i, regno;
  unsigned int px;
  enum reg_class cl;
  rtx operand;
  ira_allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand[i];
      if (in_p && recog_data.operand_type[i] != OP_IN
	  && recog_data.operand_type[i] != OP_INOUT)
	continue;
      if (! in_p && recog_data.operand_type[i] != OP_OUT
	  && recog_data.operand_type[i] != OP_INOUT)
	continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
	continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
	operand = SUBREG_REG (operand);

      if (REG_P (operand)
	  && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
	{
	  enum reg_class aclass;

	  operand_a = ira_curr_regno_allocno_map[regno];
	  aclass = ALLOCNO_CLASS (operand_a);
	  if (ira_class_subset_p[cl][aclass])
	    {
	      /* View the desired allocation of OPERAND as:

		    (REG:YMODE YREGNO),

		 a simplification of:

		    (subreg:YMODE (reg:XMODE XREGNO) OFFSET).  */
	      machine_mode ymode, xmode;
	      int xregno, yregno;
	      HOST_WIDE_INT offset;

	      xmode = recog_data.operand_mode[i];
	      xregno = ira_class_singleton[cl][xmode];
	      gcc_assert (xregno >= 0);
	      ymode = ALLOCNO_MODE (operand_a);
	      offset = subreg_lowpart_offset (ymode, xmode);
	      yregno = simplify_subreg_regno (xregno, xmode, offset, ymode);
	      if (yregno >= 0
		  && ira_class_hard_reg_index[aclass][yregno] >= 0)
		{
		  int cost;

		  ira_allocate_and_set_costs
		    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a),
		     aclass, 0);
		  ira_init_register_move_cost_if_necessary (xmode);
		  cost = freq * (in_p
				 ? ira_register_move_cost[xmode][aclass][cl]
				 : ira_register_move_cost[xmode][cl][aclass]);
		  ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
		    [ira_class_hard_reg_index[aclass][yregno]] -= cost;
		}
	    }
	}

      EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
        {
	  ira_object_t obj = ira_object_id_map[px];
	  a = OBJECT_ALLOCNO (obj);
	  if (a != operand_a)
	    {
	      /* We could increase costs of A instead of making it
		 conflict with the hard register.  But that works worse
		 because it will be spilled in reload anyway.  */
	      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
				reg_class_contents[cl]);
	      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
				reg_class_contents[cl]);
	    }
	}
    }
}

/* Return true when one of the predecessor edges of BB is marked with
   EDGE_ABNORMAL_CALL or EDGE_EH.  */
static bool
bb_has_abnormal_call_pred (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
	return true;
    }
  return false;
}

/* Look through the CALL_INSN_FUNCTION_USAGE of a call insn INSN, and see if
   we find a SET rtx that we can use to deduce that a register can be cheaply
   caller-saved.  Return such a register, or NULL_RTX if none is found.  */
static rtx
find_call_crossed_cheap_reg (rtx_insn *insn)
{
  rtx cheap_reg = NULL_RTX;
  rtx exp = CALL_INSN_FUNCTION_USAGE (insn);

  while (exp != NULL)
    {
      rtx x = XEXP (exp, 0);
      if (GET_CODE (x) == SET)
	{
	  exp = x;
	  break;
	}
      exp = XEXP (exp, 1);
    }
  if (exp != NULL)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx reg = SET_SRC (exp);
      rtx_insn *prev = PREV_INSN (insn);
      while (prev && !(INSN_P (prev)
		       && BLOCK_FOR_INSN (prev) != bb))
	{
	  if (NONDEBUG_INSN_P (prev))
	    {
	      rtx set = single_set (prev);

	      if (set && rtx_equal_p (SET_DEST (set), reg))
		{
		  rtx src = SET_SRC (set);
		  if (!REG_P (src) || HARD_REGISTER_P (src)
		      || !pseudo_regno_single_word_and_live_p (REGNO (src)))
		    break;
		  if (!modified_between_p (src, prev, insn))
		    cheap_reg = src;
		  break;
		}
	      if (set && rtx_equal_p (SET_SRC (set), reg))
		{
		  rtx dest = SET_DEST (set);
		  if (!REG_P (dest) || HARD_REGISTER_P (dest)
		      || !pseudo_regno_single_word_and_live_p (REGNO (dest)))
		    break;
		  if (!modified_between_p (dest, prev, insn))
		    cheap_reg = dest;
		  break;
		}

	      if (reg_overlap_mentioned_p (reg, PATTERN (prev)))
		break;
	    }
	  prev = PREV_INSN (prev);
	}
    }
  return cheap_reg;
}
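
/* A sketch of the situation this looks for (hypothetical pseudo and
   hard register numbers): if CALL_INSN_FUNCTION_USAGE contains
   something like (set (reg:DI 0) (reg:DI 5)), recording that the
   callee returns one of its arguments unchanged, and an earlier insn
   in the block loaded that argument register from a pseudo,
   e.g. (set (reg:DI 5) (reg:DI 200)), then pseudo 200 is live across
   the call but its value can be recovered cheaply from the call's
   result, so it is returned here and the caller marks the call with a
   REG_RETURNED note.  */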

/* Process insns of the basic block given by its LOOP_TREE_NODE to
   update allocno live ranges, allocno hard register conflicts,
   intersected calls, and register pressure info for allocnos for the
   basic block and for the regions containing the basic block.  */
static void
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
{
  int i, freq;
  unsigned int j;
  basic_block bb;
  rtx_insn *insn;
  bitmap_iterator bi;
  bitmap reg_live_out;
  unsigned int px;
  bool set_p;

  bb = loop_tree_node->bb;
  if (bb != NULL)
    {
      for (i = 0; i < ira_pressure_classes_num; i++)
	{
	  curr_reg_pressure[ira_pressure_classes[i]] = 0;
	  high_pressure_start_point[ira_pressure_classes[i]] = -1;
	}
      curr_bb_node = loop_tree_node;
      reg_live_out = df_get_live_out (bb);
      sparseset_clear (objects_live);
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (hard_regs_live, i))
	  {
	    enum reg_class aclass, pclass, cl;

	    aclass = ira_allocno_class_translate[REGNO_REG_CLASS (i)];
	    pclass = ira_pressure_class_translate[aclass];
	    for (j = 0;
		 (cl = ira_reg_class_super_classes[pclass][j])
		   != LIM_REG_CLASSES;
		 j++)
	      {
		if (! ira_reg_pressure_class_p[cl])
		  continue;
		curr_reg_pressure[cl]++;
		if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
		  curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
		ira_assert (curr_reg_pressure[cl]
			    <= ira_class_hard_regs_num[cl]);
	      }
	  }
      EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
	mark_pseudo_regno_live (j);

      freq = REG_FREQ_FROM_BB (bb);
      if (freq == 0)
	freq = 1;

      /* Invalidate all allocno_saved_at_call entries.  */
      last_call_num++;

      /* Scan the code of this basic block, noting which allocnos and
	 hard regs are born or die.

	 Note that this loop treats uninitialized values as live until
	 the beginning of the block.  For example, if an instruction
	 uses (reg:DI foo), and only (subreg:SI (reg:DI foo) 0) is ever
	 set, FOO will remain live until the beginning of the block.
	 Likewise if FOO is not set at all.  This is unnecessarily
	 pessimistic, but it probably doesn't matter much in practice.  */
      FOR_BB_INSNS_REVERSE (bb, insn)
	{
	  ira_allocno_t a;
	  df_ref def, use;
	  bool call_p;

	  if (!NONDEBUG_INSN_P (insn))
	    continue;

	  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
		     INSN_UID (insn), loop_tree_node->parent->loop_num,
		     curr_point);

	  call_p = CALL_P (insn);
#ifdef REAL_PIC_OFFSET_TABLE_REGNUM
	  int regno;
	  bool clear_pic_use_conflict_p = false;
	  /* Processing the USEs of a call insn can create a conflict
	     between the PIC pseudo and the PIC hard register, which
	     is wrong.  Check for this situation and fix it up at the
	     end of the insn processing.  */
	  if (call_p && pic_offset_table_rtx != NULL_RTX
	      && (regno = REGNO (pic_offset_table_rtx)) >= FIRST_PSEUDO_REGISTER
	      && (a = ira_curr_regno_allocno_map[regno]) != NULL)
	    clear_pic_use_conflict_p
		= (find_regno_fusage (insn, USE, REAL_PIC_OFFSET_TABLE_REGNUM)
		   && ! TEST_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS
					   (ALLOCNO_OBJECT (a, 0)),
					   REAL_PIC_OFFSET_TABLE_REGNUM));
#endif

	  /* Mark each defined value as live.  We need to do this for
	     unused values because they still conflict with quantities
	     that are live at the time of the definition.

	     Ignore DF_REF_MAY_CLOBBERs on a call instruction.  Such
	     references represent the effect of the called function
	     on a call-clobbered register.  Marking the register as
	     live would stop us from allocating it to a call-crossing
	     allocno.  */
	  FOR_EACH_INSN_DEF (def, insn)
	    if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
	      mark_ref_live (def);

	  /* If INSN has multiple outputs, then any value used in one
	     of the outputs conflicts with the other outputs.  Model this
	     by making the used value live during the output phase.

	     It is unsafe to use !single_set here since it will ignore
	     an unused output.  Just because an output is unused does
	     not mean the compiler can assume the side effect will not
	     occur.  Consider if ALLOCNO appears in the address of an
	     output and we reload the output.  If we allocate ALLOCNO
	     to the same hard register as an unused output we could
	     set the hard register before the output reload insn.  */
	  if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	    FOR_EACH_INSN_USE (use, insn)
	      {
		int i;
		rtx reg;

		reg = DF_REF_REG (use);
		for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
		  {
		    rtx set;

		    set = XVECEXP (PATTERN (insn), 0, i);
		    if (GET_CODE (set) == SET
			&& reg_overlap_mentioned_p (reg, SET_DEST (set)))
		      {
			/* After the previous loop, this is a no-op if
			   REG is contained within SET_DEST (SET).  */
			mark_ref_live (use);
			break;
		      }
		  }
	      }

	  extract_insn (insn);
	  preferred_alternatives = get_preferred_alternatives (insn);
	  preprocess_constraints (insn);
	  process_single_reg_class_operands (false, freq);

	  /* See which defined values die here.  */
	  FOR_EACH_INSN_DEF (def, insn)
	    if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
	      mark_ref_dead (def);

	  if (call_p)
	    {
	      /* Try to find a SET in the CALL_INSN_FUNCTION_USAGE, and from
		 there, try to find a pseudo that is live across the call but
		 can be cheaply reconstructed from the return value.  */
	      rtx cheap_reg = find_call_crossed_cheap_reg (insn);
	      if (cheap_reg != NULL_RTX)
		add_reg_note (insn, REG_RETURNED, cheap_reg);

	      last_call_num++;
	      sparseset_clear (allocnos_processed);
	      /* The allocnos that are currently live are live across
		 the call.  */
	      EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
	        {
		  ira_object_t obj = ira_object_id_map[i];
		  a = OBJECT_ALLOCNO (obj);
		  int num = ALLOCNO_NUM (a);
		  HARD_REG_SET this_call_used_reg_set;

		  get_call_reg_set_usage (insn, &this_call_used_reg_set,
					  call_used_reg_set);

		  /* Don't allocate allocnos that cross setjmps or any
		     call, if this function receives a nonlocal
		     goto.  */
		  if (cfun->has_nonlocal_label
		      || find_reg_note (insn, REG_SETJMP,
					NULL_RTX) != NULL_RTX)
		    {
		      SET_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj));
		      SET_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
		    }
		  if (can_throw_internal (insn))
		    {
		      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
					this_call_used_reg_set);
		      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
					this_call_used_reg_set);
		    }

		  if (sparseset_bit_p (allocnos_processed, num))
		    continue;
		  sparseset_set_bit (allocnos_processed, num);

		  if (allocno_saved_at_call[num] != last_call_num)
		    /* Here we are mimicking caller-save.c behavior
		       which does not save a hard register at a call
		       if it was saved on a previous call in the same
		       basic block and the hard register was not
		       mentioned between the two calls.  */
		    ALLOCNO_CALL_FREQ (a) += freq;
		  /* Mark it as saved at the next call.  */
		  allocno_saved_at_call[num] = last_call_num + 1;
		  ALLOCNO_CALLS_CROSSED_NUM (a)++;
		  IOR_HARD_REG_SET (ALLOCNO_CROSSED_CALLS_CLOBBERED_REGS (a),
				    this_call_used_reg_set);
		  if (cheap_reg != NULL_RTX
		      && ALLOCNO_REGNO (a) == (int) REGNO (cheap_reg))
		    ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)++;
		}
	    }

	  make_early_clobber_and_input_conflicts ();

	  curr_point++;

	  /* Mark each used value as live.  */
	  FOR_EACH_INSN_USE (use, insn)
	    mark_ref_live (use);

	  process_single_reg_class_operands (true, freq);

	  set_p = mark_hard_reg_early_clobbers (insn, true);

	  if (set_p)
	    {
	      mark_hard_reg_early_clobbers (insn, false);

	      /* Mark each hard reg as live again.  For example, a
		 hard register can appear both in a clobber and as an
		 insn input.  */
	      FOR_EACH_INSN_USE (use, insn)
		{
		  rtx ureg = DF_REF_REG (use);

		  if (GET_CODE (ureg) == SUBREG)
		    ureg = SUBREG_REG (ureg);
		  if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  mark_ref_live (use);
		}
	    }

#ifdef REAL_PIC_OFFSET_TABLE_REGNUM
	  if (clear_pic_use_conflict_p)
	    {
	      regno = REGNO (pic_offset_table_rtx);
	      a = ira_curr_regno_allocno_map[regno];
	      CLEAR_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (ALLOCNO_OBJECT (a, 0)),
				  REAL_PIC_OFFSET_TABLE_REGNUM);
	      CLEAR_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS
				  (ALLOCNO_OBJECT (a, 0)),
				  REAL_PIC_OFFSET_TABLE_REGNUM);
	    }
#endif
	  curr_point++;
	}

#ifdef EH_RETURN_DATA_REGNO
      if (bb_has_eh_pred (bb))
	for (j = 0; ; ++j)
	  {
	    unsigned int regno = EH_RETURN_DATA_REGNO (j);
	    if (regno == INVALID_REGNUM)
	      break;
	    make_hard_regno_born (regno);
	  }
#endif

      /* Allocnos can't go in stack regs at the start of a basic block
	 that is reached by an abnormal edge.  Likewise for call
	 clobbered regs, because caller-save, fixup_abnormal_edges and
	 possibly the table driven EH machinery are not quite ready to
	 handle such allocnos live across such edges.  */
      if (bb_has_abnormal_pred (bb))
	{
#ifdef STACK_REGS
	  EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
	    {
	      ira_allocno_t a = OBJECT_ALLOCNO (ira_object_id_map[px]);

	      ALLOCNO_NO_STACK_REG_P (a) = true;
	      ALLOCNO_TOTAL_NO_STACK_REG_P (a) = true;
	    }
	  for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
	    make_hard_regno_born (px);
#endif
	  /* No need to record conflicts for call clobbered regs if we
	     have nonlocal labels around, as we don't ever try to
	     allocate such regs in this case.  */
	  if (!cfun->has_nonlocal_label && bb_has_abnormal_call_pred (bb))
	    for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
	      if (call_used_regs[px])
		make_hard_regno_born (px);
	}

      EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
	make_object_dead (ira_object_id_map[i]);

      curr_point++;

    }
  /* Propagate register pressure to upper loop tree nodes.  */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < ira_pressure_classes_num; i++)
      {
	enum reg_class pclass;

	pclass = ira_pressure_classes[i];
	if (loop_tree_node->reg_pressure[pclass]
	    > loop_tree_node->parent->reg_pressure[pclass])
	  loop_tree_node->parent->reg_pressure[pclass]
	    = loop_tree_node->reg_pressure[pclass];
      }
}

/* Create and set up IRA_START_POINT_RANGES and
   IRA_FINISH_POINT_RANGES.  */
static void
create_start_finish_chains (void)
{
  ira_object_t obj;
  ira_object_iterator oi;
  live_range_t r;

  ira_start_point_ranges
    = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
  memset (ira_start_point_ranges, 0, ira_max_point * sizeof (live_range_t));
  ira_finish_point_ranges
    = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
  memset (ira_finish_point_ranges, 0, ira_max_point * sizeof (live_range_t));
  FOR_EACH_OBJECT (obj, oi)
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
      {
	r->start_next = ira_start_point_ranges[r->start];
	ira_start_point_ranges[r->start] = r;
	r->finish_next = ira_finish_point_ranges[r->finish];
	ira_finish_point_ranges[r->finish] = r;
      }
}

/* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
   new live ranges and program points were added as a result of new
   insn generation.  */
void
ira_rebuild_start_finish_chains (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
  create_start_finish_chains ();
}

/* Compress allocno live ranges by removing program points where
   nothing happens.  */
static void
remove_some_program_points_and_update_live_ranges (void)
{
  unsigned i;
  int n;
  int *map;
  ira_object_t obj;
  ira_object_iterator oi;
  live_range_t r, prev_r, next_r;
  sbitmap born_or_dead, born, dead;
  sbitmap_iterator sbi;
  bool born_p, dead_p, prev_born_p, prev_dead_p;

  born = sbitmap_alloc (ira_max_point);
  dead = sbitmap_alloc (ira_max_point);
  bitmap_clear (born);
  bitmap_clear (dead);
  FOR_EACH_OBJECT (obj, oi)
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
      {
	ira_assert (r->start <= r->finish);
	bitmap_set_bit (born, r->start);
	bitmap_set_bit (dead, r->finish);
      }

  born_or_dead = sbitmap_alloc (ira_max_point);
  bitmap_ior (born_or_dead, born, dead);
  map = (int *) ira_allocate (sizeof (int) * ira_max_point);
  n = -1;
  prev_born_p = prev_dead_p = false;
  EXECUTE_IF_SET_IN_BITMAP (born_or_dead, 0, i, sbi)
    {
      born_p = bitmap_bit_p (born, i);
      dead_p = bitmap_bit_p (dead, i);
      if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
	  || (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
	map[i] = n;
      else
	map[i] = ++n;
      prev_born_p = born_p;
      prev_dead_p = dead_p;
    }
  sbitmap_free (born_or_dead);
  sbitmap_free (born);
  sbitmap_free (dead);
  n++;
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
	     ira_max_point, n, 100 * n / ira_max_point);
  ira_max_point = n;

  FOR_EACH_OBJECT (obj, oi)
    for (r = OBJECT_LIVE_RANGES (obj), prev_r = NULL; r != NULL; r = next_r)
      {
	next_r = r->next;
	r->start = map[r->start];
	r->finish = map[r->finish];
	if (prev_r == NULL || prev_r->start > r->finish + 1)
	  {
	    prev_r = r;
	    continue;
	  }
	prev_r->start = r->start;
	prev_r->next = next_r;
	ira_finish_live_range (r);
      }

  ira_free (map);
}
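
/* As an example of the compression: if two consecutive program points
   see only births (and no deaths), nothing can start or stop
   conflicting between them, so both are mapped to the same compressed
   point; likewise for two consecutive death-only points.  After the
   renumbering, ranges of the same object that become adjacent or
   overlapping are merged above.  */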

/* Print live ranges R to file F.  */
void
ira_print_live_range_list (FILE *f, live_range_t r)
{
  for (; r != NULL; r = r->next)
    fprintf (f, " [%d..%d]", r->start, r->finish);
  fprintf (f, "\n");
}

DEBUG_FUNCTION void
debug (live_range &ref)
{
  ira_print_live_range_list (stderr, &ref);
}

DEBUG_FUNCTION void
debug (live_range *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Print live ranges R to stderr.  */
void
ira_debug_live_range_list (live_range_t r)
{
  ira_print_live_range_list (stderr, r);
}

/* Print live ranges of object OBJ to file F.  */
static void
print_object_live_ranges (FILE *f, ira_object_t obj)
{
  ira_print_live_range_list (f, OBJECT_LIVE_RANGES (obj));
}

/* Print live ranges of allocno A to file F.  */
static void
print_allocno_live_ranges (FILE *f, ira_allocno_t a)
{
  int n = ALLOCNO_NUM_OBJECTS (a);
  int i;

  for (i = 0; i < n; i++)
    {
      fprintf (f, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
      if (n > 1)
	fprintf (f, " [%d]", i);
      fprintf (f, "):");
      print_object_live_ranges (f, ALLOCNO_OBJECT (a, i));
    }
}

/* Print live ranges of allocno A to stderr.  */
void
ira_debug_allocno_live_ranges (ira_allocno_t a)
{
  print_allocno_live_ranges (stderr, a);
}

/* Print live ranges of all allocnos to file F.  */
static void
print_live_ranges (FILE *f)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    print_allocno_live_ranges (f, a);
}

/* Print live ranges of all allocnos to stderr.  */
void
ira_debug_live_ranges (void)
{
  print_live_ranges (stderr);
}

/* The main entry function creates live ranges, sets up
   CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for objects, and
   calculates register pressure info.  */
void
ira_create_allocno_live_ranges (void)
{
  objects_live = sparseset_alloc (ira_objects_num);
  allocnos_processed = sparseset_alloc (ira_allocnos_num);
  curr_point = 0;
  last_call_num = 0;
  allocno_saved_at_call
    = (int *) ira_allocate (ira_allocnos_num * sizeof (int));
  memset (allocno_saved_at_call, 0, ira_allocnos_num * sizeof (int));
  ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
			  process_bb_node_lives);
  ira_max_point = curr_point;
  create_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_live_ranges (ira_dump_file);
  /* Clean up.  */
  ira_free (allocno_saved_at_call);
  sparseset_free (objects_live);
  sparseset_free (allocnos_processed);
}

/* Compress allocno live ranges.  */
void
ira_compress_allocno_live_ranges (void)
{
  remove_some_program_points_and_update_live_ranges ();
  ira_rebuild_start_finish_chains ();
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, "Ranges after the compression:\n");
      print_live_ranges (ira_dump_file);
    }
}

/* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES.  */
void
ira_finish_allocno_live_ranges (void)
{
  ira_free (ira_finish_point_ranges);
  ira_free (ira_start_point_ranges);
}