1/* Standard problems for dataflow support routines.
2   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3   Free Software Foundation, Inc.
4   Originally contributed by Michael P. Hayes
5             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7             and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
13Software Foundation; either version 2, or (at your option) any later
14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
22along with GCC; see the file COPYING.  If not, write to the Free
23Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
25
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "rtl.h"
31#include "tm_p.h"
32#include "insn-config.h"
33#include "recog.h"
34#include "function.h"
35#include "regs.h"
36#include "output.h"
37#include "alloc-pool.h"
38#include "flags.h"
39#include "hard-reg-set.h"
40#include "basic-block.h"
41#include "sbitmap.h"
42#include "bitmap.h"
43#include "timevar.h"
44#include "df.h"
45#include "vecprim.h"
46#include "except.h"
47
48#if 0
49#define REG_DEAD_DEBUGGING
50#endif
51
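/* Registers with more than this many refs have their kill information
   represented sparsely, by regno, rather than densely, by ref id.  */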
52#define DF_SPARSE_THRESHOLD 32
53
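/* Scratch bitmaps used by the local computation routines below.  They
   record the regnos for which a def has already been seen in the
   current basic block and in the current insn, respectively.  */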
54static bitmap seen_in_block = NULL;
55static bitmap seen_in_insn = NULL;
56static void df_ri_dump (struct dataflow *, FILE *);
57
58
59/*----------------------------------------------------------------------------
   Public access functions for the dataflow problems.
61----------------------------------------------------------------------------*/
62
63/* Create a du or ud chain from SRC to DST and link it into SRC.   */
64
65struct df_link *
66df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
67{
68  struct df_link *head = DF_REF_CHAIN (src);
  struct df_link *link = pool_alloc (dflow->block_pool);
70
71  DF_REF_CHAIN (src) = link;
72  link->next = head;
73  link->ref = dst;
74  return link;
75}
76
77
/* Delete a du or ud chain for REF.  If LINK is NULL, delete all
   chains for REF and check to see if the reverse chains can also be
   deleted.  If LINK is not NULL it must be a link off of REF.  In
   this case, the other end is not deleted.  */
82
83void
84df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
85{
86  struct df_link *chain = DF_REF_CHAIN (ref);
87  if (link)
88    {
89      /* Link was the first element in the chain.  */
90      if (chain == link)
91	DF_REF_CHAIN (ref) = link->next;
92      else
93	{
94	  /* Link is an internal element in the chain.  */
95	  struct df_link *prev = chain;
96	  while (chain)
97	    {
98	      if (chain == link)
99		{
100		  prev->next = chain->next;
101		  break;
102		}
103	      prev = chain;
104	      chain = chain->next;
105	    }
106	}
107      pool_free (dflow->block_pool, link);
108    }
109  else
110    {
      /* If chain is NULL here, it was because of a recursive call
	 when the other flavor of chains was not built.  Just run
	 through the entire chain calling the other side and then
	 deleting the link.  */
115      while (chain)
116	{
117	  struct df_link *next = chain->next;
118	  /* Delete the other side if it exists.  */
119	  df_chain_unlink (dflow, chain->ref, chain);
120	  chain = next;
121	}
122    }
123}
124
125
126/* Copy the du or ud chain starting at FROM_REF and attach it to
127   TO_REF.  */
128
129void
130df_chain_copy (struct dataflow *dflow,
131	       struct df_ref *to_ref,
132	       struct df_link *from_ref)
133{
134  while (from_ref)
135    {
136      df_chain_create (dflow, to_ref, from_ref->ref);
137      from_ref = from_ref->next;
138    }
139}
140
141
142/* Get the live in set for BB no matter what problem happens to be
143   defined.  */
144
145bitmap
146df_get_live_in (struct df *df, basic_block bb)
147{
148  gcc_assert (df->problems_by_index[DF_LR]);
149
150  if (df->problems_by_index[DF_UREC])
151    return DF_RA_LIVE_IN (df, bb);
152  else if (df->problems_by_index[DF_UR])
153    return DF_LIVE_IN (df, bb);
154  else
155    return DF_UPWARD_LIVE_IN (df, bb);
156}
157
158
159/* Get the live out set for BB no matter what problem happens to be
160   defined.  */
161
162bitmap
163df_get_live_out (struct df *df, basic_block bb)
164{
165  gcc_assert (df->problems_by_index[DF_LR]);
166
167  if (df->problems_by_index[DF_UREC])
168    return DF_RA_LIVE_OUT (df, bb);
169  else if (df->problems_by_index[DF_UR])
170    return DF_LIVE_OUT (df, bb);
171  else
172    return DF_UPWARD_LIVE_OUT (df, bb);
173}
174
175
176/*----------------------------------------------------------------------------
177   Utility functions.
178----------------------------------------------------------------------------*/
179
180/* Generic versions to get the void* version of the block info.  Only
181   used inside the problem instance vectors.  */
182
183/* Grow the bb_info array.  */
184
185void
186df_grow_bb_info (struct dataflow *dflow)
187{
188  unsigned int new_size = last_basic_block + 1;
189  if (dflow->block_info_size < new_size)
190    {
191      new_size += new_size / 4;
      dflow->block_info = xrealloc (dflow->block_info,
				    new_size * sizeof (void *));
      memset (dflow->block_info + dflow->block_info_size, 0,
	      (new_size - dflow->block_info_size) * sizeof (void *));
196      dflow->block_info_size = new_size;
197    }
198}
199
200/* Dump a def-use or use-def chain for REF to FILE.  */
201
202void
203df_chain_dump (struct df_link *link, FILE *file)
204{
205  fprintf (file, "{ ");
206  for (; link; link = link->next)
207    {
208      fprintf (file, "%c%d(bb %d insn %d) ",
209	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
210	       DF_REF_ID (link->ref),
211	       DF_REF_BBNO (link->ref),
212	       DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
213    }
214  fprintf (file, "}");
215}
216
217
218/* Print some basic block info as part of df_dump.  */
219
220void
221df_print_bb_index (basic_block bb, FILE *file)
222{
223  edge e;
224  edge_iterator ei;
225
226  fprintf (file, "( ");
  FOR_EACH_EDGE (e, ei, bb->preds)
228    {
229      basic_block pred = e->src;
230      fprintf (file, "%d ", pred->index);
231    }
232  fprintf (file, ")->[%d]->( ", bb->index);
233  FOR_EACH_EDGE (e, ei, bb->succs)
234    {
235      basic_block succ = e->dest;
236      fprintf (file, "%d ", succ->index);
237    }
238  fprintf (file, ")\n");
239}
240
241
242/* Return a bitmap for REGNO from the cache MAPS.  The bitmap is to
243   contain COUNT bits starting at START.  These bitmaps are not to be
244   changed since there is a cache of them.  */
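/* For example, the reaching uses problem calls
   df_ref_bitmap (problem_data->use_sites, regno, begin, n_uses)
   to get a cached bitmap with bits begin .. begin + n_uses - 1 set,
   one for each use id of the register.  */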
245
246static inline bitmap
247df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
248{
249  bitmap ids = maps[regno];
250  if (!ids)
251    {
252      unsigned int i;
      unsigned int end = start + count;
254      ids = BITMAP_ALLOC (NULL);
255      maps[regno] = ids;
256      for (i = start; i < end; i++)
257	bitmap_set_bit (ids, i);
258    }
259  return ids;
260}
261
262
/* Make sure that the seen_in_insn and seen_in_block bitmaps are set
   up correctly.  */
265
266static void
267df_set_seen (void)
268{
269  seen_in_block = BITMAP_ALLOC (NULL);
270  seen_in_insn = BITMAP_ALLOC (NULL);
271}
272
273
274static void
275df_unset_seen (void)
276{
277  BITMAP_FREE (seen_in_block);
278  BITMAP_FREE (seen_in_insn);
279}
280
281
282
283/*----------------------------------------------------------------------------
284   REACHING USES
285
286   Find the locations in the function where each use site for a pseudo
287   can reach backwards.  In and out bitvectors are built for each basic
288   block.  The id field in the ref is used to index into these sets.
289   See df.h for details.
290
291----------------------------------------------------------------------------*/
292
293/* This problem plays a large number of games for the sake of
294   efficiency.
295
   1) The order of the bits in the bitvectors.  After the scanning
   phase, all of the uses are sorted.  All of the uses for reg 0
   come first, followed by all uses for reg 1 and so on.

   2) There are two kill sets, one used if the number of uses is at
   most DF_SPARSE_THRESHOLD and another if it is greater.

   <= : There is a bitmap for each register, use_sites[N], that is
   built on demand.  This bitvector contains a 1 for each use of reg
   N.

   > : One level of indirection is used to keep from generating long
   strings of 1 bits in the kill sets.  Bitvectors that are indexed
   by the regnum are used to represent that there is a killing def
   for the register.  The confluence and transfer functions use
   these along with the bitmap_clear_range call to remove ranges of
   bits without actually generating a knockout vector.

   The kill and sparse_kill sets, and the dense_invalidated_by_call
   and sparse_invalidated_by_call sets, both play this game.  */
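/* As a purely hypothetical illustration: if reg 5 has three uses with
   ids 17, 18 and 19, then use_sites[5] caches {17, 18, 19} and a def
   of reg 5 simply ORs those bits into the kill set.  If reg 6 instead
   has hundreds of uses, a def of reg 6 just sets bit 6 in sparse_kill
   and the transfer function later does

       bitmap_clear_range (tmp, DF_REG_USE_GET (df, 6)->begin,
			   DF_REG_USE_GET (df, 6)->n_refs);

   to knock those uses out without materializing a long run of 1s.  */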
316
317/* Private data used to compute the solution for this problem.  These
318   data structures are not accessible outside of this module.  */
319struct df_ru_problem_data
320{
321
322  bitmap *use_sites;            /* Bitmap of uses for each pseudo.  */
323  unsigned int use_sites_size;  /* Size of use_sites.  */
  /* The set of regs invalidated by call, for regs whose uses are
     represented sparsely (indexed by regno).  */
  bitmap sparse_invalidated_by_call;
  /* The set of uses invalidated by call, for regs whose uses are
     represented densely (indexed by use id).  */
  bitmap dense_invalidated_by_call;
328};
329
330/* Get basic block info.  */
331
332struct df_ru_bb_info *
333df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
334{
335  return (struct df_ru_bb_info *) dflow->block_info[index];
336}
337
338
339/* Set basic block info.  */
340
341static void
342df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
343		   struct df_ru_bb_info *bb_info)
344{
345  dflow->block_info[index] = bb_info;
346}
347
348
349/* Free basic block info.  */
350
351static void
352df_ru_free_bb_info (struct dataflow *dflow,
353		    basic_block bb ATTRIBUTE_UNUSED,
354		    void *vbb_info)
355{
356  struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
357  if (bb_info)
358    {
359      BITMAP_FREE (bb_info->kill);
360      BITMAP_FREE (bb_info->sparse_kill);
361      BITMAP_FREE (bb_info->gen);
362      BITMAP_FREE (bb_info->in);
363      BITMAP_FREE (bb_info->out);
364      pool_free (dflow->block_pool, bb_info);
365    }
366}
367
368
369/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
370   not touched unless the block is new.  */
371
372static void
373df_ru_alloc (struct dataflow *dflow,
374	     bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
375	     bitmap all_blocks)
376{
377  unsigned int bb_index;
378  bitmap_iterator bi;
379  unsigned int reg_size = max_reg_num ();
380
381  if (!dflow->block_pool)
382    dflow->block_pool = create_alloc_pool ("df_ru_block pool",
383					   sizeof (struct df_ru_bb_info), 50);
384
385  if (dflow->problem_data)
386    {
387      unsigned int i;
388      struct df_ru_problem_data *problem_data
389	= (struct df_ru_problem_data *) dflow->problem_data;
390
391      for (i = 0; i < problem_data->use_sites_size; i++)
392	{
393	  bitmap bm = problem_data->use_sites[i];
394	  if (bm)
395	    {
396	      BITMAP_FREE (bm);
397	      problem_data->use_sites[i] = NULL;
398	    }
399	}
400
401      if (problem_data->use_sites_size < reg_size)
402	{
403	  problem_data->use_sites
404	    = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
405	  memset (problem_data->use_sites + problem_data->use_sites_size, 0,
406		  (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
407	  problem_data->use_sites_size = reg_size;
408	}
409
410      bitmap_clear (problem_data->sparse_invalidated_by_call);
411      bitmap_clear (problem_data->dense_invalidated_by_call);
412    }
413  else
414    {
415      struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
416      dflow->problem_data = problem_data;
417
418      problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
419      problem_data->use_sites_size = reg_size;
420      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
421      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
422    }
423
424  df_grow_bb_info (dflow);
425
  /* Because of the clustering of all use sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */
429
430  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
431    {
432      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
433      if (bb_info)
434	{
435	  bitmap_clear (bb_info->kill);
436	  bitmap_clear (bb_info->sparse_kill);
437	  bitmap_clear (bb_info->gen);
438	}
439      else
440	{
441	  bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
442	  df_ru_set_bb_info (dflow, bb_index, bb_info);
443	  bb_info->kill = BITMAP_ALLOC (NULL);
444	  bb_info->sparse_kill = BITMAP_ALLOC (NULL);
445	  bb_info->gen = BITMAP_ALLOC (NULL);
446	  bb_info->in = BITMAP_ALLOC (NULL);
447	  bb_info->out = BITMAP_ALLOC (NULL);
448	}
449    }
450}
451
452
453/* Process a list of DEFs for df_ru_bb_local_compute.  */
454
455static void
456df_ru_bb_local_compute_process_def (struct dataflow *dflow,
457				    struct df_ru_bb_info *bb_info,
458				    struct df_ref *def,
459				    enum df_ref_flags top_flag)
460{
461  struct df *df = dflow->df;
462  while (def)
463    {
464      if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
	  /* If the def is to only part of the reg, it is as if it did
	     not happen, since some of the bits may get through.  */
467	  && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
468	{
469	  unsigned int regno = DF_REF_REGNO (def);
470	  unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
471	  unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
472	  if (!bitmap_bit_p (seen_in_block, regno))
473	    {
	      /* The first def for regno in the insn causes the kill
		 info to be generated.  Do not modify the gen set
		 because the only values in it are the uses from here
		 to the top of the block, and this def does not affect
		 them.  */
479	      if (!bitmap_bit_p (seen_in_insn, regno))
480		{
481		  if (n_uses > DF_SPARSE_THRESHOLD)
482		    bitmap_set_bit (bb_info->sparse_kill, regno);
483		  else
484		    {
485		      struct df_ru_problem_data * problem_data
486			= (struct df_ru_problem_data *)dflow->problem_data;
487		      bitmap uses
488			= df_ref_bitmap (problem_data->use_sites, regno,
489				       begin, n_uses);
490		      bitmap_ior_into (bb_info->kill, uses);
491		    }
492		}
493	      bitmap_set_bit (seen_in_insn, regno);
494	    }
495	}
496      def = def->next_ref;
497    }
498}
499
500
501/* Process a list of USEs for df_ru_bb_local_compute.  */
502
503static void
504df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
505				    struct df_ref *use,
506				    enum df_ref_flags top_flag)
507{
508  while (use)
509    {
510      if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
511	{
512	  /* Add use to set of gens in this BB unless we have seen a
513	     def in a previous instruction.  */
514	  unsigned int regno = DF_REF_REGNO (use);
515	  if (!bitmap_bit_p (seen_in_block, regno))
516	    bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
517	}
518      use = use->next_ref;
519    }
520}
521
522/* Compute local reaching use (upward exposed use) info for basic
523   block BB.  USE_INFO->REGS[R] caches the set of uses for register R.  */
524static void
525df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
526{
527  struct df *df = dflow->df;
528  basic_block bb = BASIC_BLOCK (bb_index);
529  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
530  rtx insn;
531
532  /* Set when a def for regno is seen.  */
533  bitmap_clear (seen_in_block);
534  bitmap_clear (seen_in_insn);
535
536#ifdef EH_USES
537  /* Variables defined in the prolog that are used by the exception
538     handler.  */
539  df_ru_bb_local_compute_process_use (bb_info,
540				      df_get_artificial_uses (df, bb_index),
541				      DF_REF_AT_TOP);
542#endif
543  df_ru_bb_local_compute_process_def (dflow, bb_info,
544				      df_get_artificial_defs (df, bb_index),
545				      DF_REF_AT_TOP);
546
547  FOR_BB_INSNS (bb, insn)
548    {
549      unsigned int uid = INSN_UID (insn);
550      if (!INSN_P (insn))
551	continue;
552
553      df_ru_bb_local_compute_process_use (bb_info,
554					  DF_INSN_UID_USES (df, uid), 0);
555
556      df_ru_bb_local_compute_process_def (dflow, bb_info,
557					  DF_INSN_UID_DEFS (df, uid), 0);
558
559      bitmap_ior_into (seen_in_block, seen_in_insn);
560      bitmap_clear (seen_in_insn);
561    }
562
563  /* Process the hardware registers that are always live.  */
564  df_ru_bb_local_compute_process_use (bb_info,
565				      df_get_artificial_uses (df, bb_index), 0);
566
567  df_ru_bb_local_compute_process_def (dflow, bb_info,
568				      df_get_artificial_defs (df, bb_index), 0);
569}
570
571
572/* Compute local reaching use (upward exposed use) info for each basic
573   block within BLOCKS.  */
574static void
575df_ru_local_compute (struct dataflow *dflow,
576		     bitmap all_blocks,
577		     bitmap rescan_blocks  ATTRIBUTE_UNUSED)
578{
579  struct df *df = dflow->df;
580  unsigned int bb_index;
581  bitmap_iterator bi;
582  unsigned int regno;
583  struct df_ru_problem_data *problem_data
584    = (struct df_ru_problem_data *) dflow->problem_data;
585  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
586  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
587
588  df_set_seen ();
589
590  if (!df->use_info.refs_organized)
591    df_reorganize_refs (&df->use_info);
592
593  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
594    {
595      df_ru_bb_local_compute (dflow, bb_index);
596    }
597
  /* Set up the knockout bit vectors to be applied across EH edges.  */
599  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
600    {
601      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
602      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
603	bitmap_set_bit (sparse_invalidated, regno);
604      else
605	{
606	  bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
607				       reg_info->begin, reg_info->n_refs);
608	  bitmap_ior_into (dense_invalidated, defs);
609	}
610    }
611
612  df_unset_seen ();
613}
614
615
/* Initialize the solution bit vectors for the problem.  */
617
618static void
619df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
620{
621  unsigned int bb_index;
622  bitmap_iterator bi;
623
624  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
625    {
626      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
627      bitmap_copy (bb_info->in, bb_info->gen);
628      bitmap_clear (bb_info->out);
629    }
630}
631
632
/* Confluence function: the out set of the edge's source block gets
   the union (OR) of the in set of the edge's destination block.  */
634
635static void
636df_ru_confluence_n (struct dataflow *dflow, edge e)
637{
638  bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
639  bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
640
641  if (e->flags & EDGE_EH)
642    {
643      struct df_ru_problem_data *problem_data
644	= (struct df_ru_problem_data *) dflow->problem_data;
645      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
646      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
647      struct df *df = dflow->df;
648      bitmap_iterator bi;
649      unsigned int regno;
650      bitmap tmp = BITMAP_ALLOC (NULL);
651
652      bitmap_copy (tmp, op2);
653      bitmap_and_compl_into (tmp, dense_invalidated);
654
655      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
656	{
657 	  bitmap_clear_range (tmp,
658 			      DF_REG_USE_GET (df, regno)->begin,
659 			      DF_REG_USE_GET (df, regno)->n_refs);
660	}
661      bitmap_ior_into (op1, tmp);
662      BITMAP_FREE (tmp);
663    }
664  else
665    bitmap_ior_into (op1, op2);
666}
667
668
669/* Transfer function.  */
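/* For this backward problem the equation computed below is

       in = gen | (out & ~kill)

   where any register recorded in sparse_kill is first cleared from a
   copy of OUT as a range of use ids.  */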
670
671static bool
672df_ru_transfer_function (struct dataflow *dflow, int bb_index)
673{
674  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
675  unsigned int regno;
676  bitmap_iterator bi;
677  bitmap in = bb_info->in;
678  bitmap out = bb_info->out;
679  bitmap gen = bb_info->gen;
680  bitmap kill = bb_info->kill;
681  bitmap sparse_kill = bb_info->sparse_kill;
682
683  if (bitmap_empty_p (sparse_kill))
684    return  bitmap_ior_and_compl (in, gen, out, kill);
685  else
686    {
687      struct df *df = dflow->df;
688      bool changed = false;
689      bitmap tmp = BITMAP_ALLOC (NULL);
690      bitmap_copy (tmp, out);
691      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
692	{
693	  bitmap_clear_range (tmp,
694 			      DF_REG_USE_GET (df, regno)->begin,
695 			      DF_REG_USE_GET (df, regno)->n_refs);
696	}
697      bitmap_and_compl_into (tmp, kill);
698      bitmap_ior_into (tmp, gen);
699      changed = !bitmap_equal_p (tmp, in);
700      if (changed)
701	{
702	  BITMAP_FREE (in);
703	  bb_info->in = tmp;
704	}
705      else
706	BITMAP_FREE (tmp);
707      return changed;
708    }
709}
710
711
712/* Free all storage associated with the problem.  */
713
714static void
715df_ru_free (struct dataflow *dflow)
716{
717  unsigned int i;
718  struct df_ru_problem_data *problem_data
719    = (struct df_ru_problem_data *) dflow->problem_data;
720
721  if (problem_data)
722    {
723      for (i = 0; i < dflow->block_info_size; i++)
724	{
725	  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
726	  if (bb_info)
727	    {
728	      BITMAP_FREE (bb_info->kill);
729	      BITMAP_FREE (bb_info->sparse_kill);
730	      BITMAP_FREE (bb_info->gen);
731	      BITMAP_FREE (bb_info->in);
732	      BITMAP_FREE (bb_info->out);
733	    }
734	}
735
736      free_alloc_pool (dflow->block_pool);
737
738      for (i = 0; i < problem_data->use_sites_size; i++)
739	{
740	  bitmap bm = problem_data->use_sites[i];
741	  if (bm)
742	    BITMAP_FREE (bm);
743	}
744
745      free (problem_data->use_sites);
746      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
747      BITMAP_FREE (problem_data->dense_invalidated_by_call);
748
749      dflow->block_info_size = 0;
750      free (dflow->block_info);
751      free (dflow->problem_data);
752    }
753  free (dflow);
754}
755
756
757/* Debugging info.  */
758
759static void
760df_ru_dump (struct dataflow *dflow, FILE *file)
761{
762  basic_block bb;
763  struct df *df = dflow->df;
764  struct df_ru_problem_data *problem_data
765    = (struct df_ru_problem_data *) dflow->problem_data;
766  unsigned int m = max_reg_num ();
767  unsigned int regno;
768
769  if (!dflow->block_info)
770    return;
771
772  fprintf (file, "Reaching uses:\n");
773
774  fprintf (file, "  sparse invalidated \t");
775  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
776  fprintf (file, "  dense invalidated \t");
777  dump_bitmap (file, problem_data->dense_invalidated_by_call);
778
779  for (regno = 0; regno < m; regno++)
780    if (DF_REG_USE_GET (df, regno)->n_refs)
781      fprintf (file, "%d[%d,%d] ", regno,
782	       DF_REG_USE_GET (df, regno)->begin,
783	       DF_REG_USE_GET (df, regno)->n_refs);
784  fprintf (file, "\n");
785
786  FOR_ALL_BB (bb)
787    {
788      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
789      df_print_bb_index (bb, file);
790
791      if (!bb_info->in)
792	continue;
793
794      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
795      dump_bitmap (file, bb_info->in);
796      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
797      dump_bitmap (file, bb_info->gen);
798      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
799      dump_bitmap (file, bb_info->kill);
800      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
801      dump_bitmap (file, bb_info->out);
802    }
803}
804
805/* All of the information associated with every instance of the problem.  */
806
807static struct df_problem problem_RU =
808{
809  DF_RU,                      /* Problem id.  */
810  DF_BACKWARD,                /* Direction.  */
811  df_ru_alloc,                /* Allocate the problem specific data.  */
812  NULL,                       /* Reset global information.  */
813  df_ru_free_bb_info,         /* Free basic block info.  */
814  df_ru_local_compute,        /* Local compute function.  */
815  df_ru_init_solution,        /* Init the solution specific data.  */
816  df_iterative_dataflow,      /* Iterative solver.  */
817  NULL,                       /* Confluence operator 0.  */
818  df_ru_confluence_n,         /* Confluence operator n.  */
819  df_ru_transfer_function,    /* Transfer function.  */
820  NULL,                       /* Finalize function.  */
821  df_ru_free,                 /* Free all of the problem information.  */
822  df_ru_dump,                 /* Debugging.  */
823  NULL,                       /* Dependent problem.  */
824  0                           /* Changeable flags.  */
825};
826
827
828
829/* Create a new DATAFLOW instance and add it to an existing instance
830   of DF.  The returned structure is what is used to get at the
831   solution.  */
832
833struct dataflow *
834df_ru_add_problem (struct df *df, int flags)
835{
836  return df_add_problem (df, &problem_RU, flags);
837}
838
839
840/*----------------------------------------------------------------------------
841   REACHING DEFINITIONS
842
843   Find the locations in the function where each definition site for a
844   pseudo reaches.  In and out bitvectors are built for each basic
845   block.  The id field in the ref is used to index into these sets.
846   See df.h for details.
847   ----------------------------------------------------------------------------*/
848
849/* See the comment at the top of the Reaching Uses problem for how the
850   uses are represented in the kill sets. The same games are played
851   here for the defs.  */
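/* Concretely, def_sites[N] caches the def ids of reg N when it has at
   most DF_SPARSE_THRESHOLD defs; otherwise sparse_kill and
   bitmap_clear_range over the reg's [begin, begin + n_refs) range of
   def ids are used instead.  */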
852
853/* Private data used to compute the solution for this problem.  These
854   data structures are not accessible outside of this module.  */
855struct df_rd_problem_data
856{
  /* If the number of defs for regnum N is at most
     DF_SPARSE_THRESHOLD, def_sites[N] contains a mask of all of
     the defs of reg N indexed by the id in the ref structure.  If
     there are more than DF_SPARSE_THRESHOLD defs for regnum N, a
     different mechanism is used to mask the defs.  */
862  bitmap *def_sites;            /* Bitmap of defs for each pseudo.  */
863  unsigned int def_sites_size;  /* Size of def_sites.  */
  /* The set of regs invalidated by call, for regs whose defs are
     represented sparsely (indexed by regno).  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs invalidated by call, for regs whose defs are
     represented densely (indexed by def id).  */
  bitmap dense_invalidated_by_call;
868};
869
870/* Get basic block info.  */
871
872struct df_rd_bb_info *
873df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
874{
875  return (struct df_rd_bb_info *) dflow->block_info[index];
876}
877
878
879/* Set basic block info.  */
880
881static void
882df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
883		   struct df_rd_bb_info *bb_info)
884{
885  dflow->block_info[index] = bb_info;
886}
887
888
889/* Free basic block info.  */
890
891static void
892df_rd_free_bb_info (struct dataflow *dflow,
893		    basic_block bb ATTRIBUTE_UNUSED,
894		    void *vbb_info)
895{
896  struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
897  if (bb_info)
898    {
899      BITMAP_FREE (bb_info->kill);
900      BITMAP_FREE (bb_info->sparse_kill);
901      BITMAP_FREE (bb_info->gen);
902      BITMAP_FREE (bb_info->in);
903      BITMAP_FREE (bb_info->out);
904      pool_free (dflow->block_pool, bb_info);
905    }
906}
907
908
909/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
910   not touched unless the block is new.  */
911
912static void
913df_rd_alloc (struct dataflow *dflow,
914	     bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
915	     bitmap all_blocks)
916{
917  unsigned int bb_index;
918  bitmap_iterator bi;
919  unsigned int reg_size = max_reg_num ();
920
921  if (!dflow->block_pool)
922    dflow->block_pool = create_alloc_pool ("df_rd_block pool",
923					   sizeof (struct df_rd_bb_info), 50);
924
925  if (dflow->problem_data)
926    {
927      unsigned int i;
928      struct df_rd_problem_data *problem_data
929	= (struct df_rd_problem_data *) dflow->problem_data;
930
931      for (i = 0; i < problem_data->def_sites_size; i++)
932	{
933	  bitmap bm = problem_data->def_sites[i];
934	  if (bm)
935	    {
936	      BITMAP_FREE (bm);
937	      problem_data->def_sites[i] = NULL;
938	    }
939	}
940
941      if (problem_data->def_sites_size < reg_size)
942	{
943	  problem_data->def_sites
	    = xrealloc (problem_data->def_sites, reg_size * sizeof (bitmap));
	  memset (problem_data->def_sites + problem_data->def_sites_size, 0,
		  (reg_size - problem_data->def_sites_size) * sizeof (bitmap));
947	  problem_data->def_sites_size = reg_size;
948	}
949
950      bitmap_clear (problem_data->sparse_invalidated_by_call);
951      bitmap_clear (problem_data->dense_invalidated_by_call);
952    }
953  else
954    {
955      struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
956      dflow->problem_data = problem_data;
957
958      problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
959      problem_data->def_sites_size = reg_size;
960      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
961      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
962    }
963
964  df_grow_bb_info (dflow);
965
  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */
969
970  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
971    {
972      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
973      if (bb_info)
974	{
975	  bitmap_clear (bb_info->kill);
976	  bitmap_clear (bb_info->sparse_kill);
977	  bitmap_clear (bb_info->gen);
978	}
979      else
980	{
981	  bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
982	  df_rd_set_bb_info (dflow, bb_index, bb_info);
983	  bb_info->kill = BITMAP_ALLOC (NULL);
984	  bb_info->sparse_kill = BITMAP_ALLOC (NULL);
985	  bb_info->gen = BITMAP_ALLOC (NULL);
986	  bb_info->in = BITMAP_ALLOC (NULL);
987	  bb_info->out = BITMAP_ALLOC (NULL);
988	}
989    }
990}
991
992
993/* Process a list of DEFs for df_rd_bb_local_compute.  */
994
995static void
996df_rd_bb_local_compute_process_def (struct dataflow *dflow,
997				    struct df_rd_bb_info *bb_info,
998				    struct df_ref *def,
999				    enum df_ref_flags top_flag)
1000{
1001  struct df *df = dflow->df;
1002  while (def)
1003    {
1004      if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
1005	{
1006	  unsigned int regno = DF_REF_REGNO (def);
1007	  unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
1008	  unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
1009
1010	  /* Only the last def(s) for a regno in the block has any
1011	     effect.  */
1012	  if (!bitmap_bit_p (seen_in_block, regno))
1013	    {
1014	      /* The first def for regno in insn gets to knock out the
1015		 defs from other instructions.  */
1016	      if ((!bitmap_bit_p (seen_in_insn, regno))
1017		  /* If the def is to only part of the reg, it does
1018		     not kill the other defs that reach here.  */
1019		  && (!((DF_REF_FLAGS (def) & DF_REF_PARTIAL)
1020			 || (DF_REF_FLAGS (def) & DF_REF_MAY_CLOBBER))))
1021		{
1022		  if (n_defs > DF_SPARSE_THRESHOLD)
1023		    {
1024		      bitmap_set_bit (bb_info->sparse_kill, regno);
		      bitmap_clear_range (bb_info->gen, begin, n_defs);
1026		    }
1027		  else
1028		    {
1029		      struct df_rd_problem_data * problem_data
1030			= (struct df_rd_problem_data *)dflow->problem_data;
1031		      bitmap defs = df_ref_bitmap (problem_data->def_sites,
1032						   regno, begin, n_defs);
1033		      bitmap_ior_into (bb_info->kill, defs);
1034		      bitmap_and_compl_into (bb_info->gen, defs);
1035		    }
1036		}
1037
1038	      bitmap_set_bit (seen_in_insn, regno);
1039	      /* All defs for regno in the instruction may be put into
1040		 the gen set.  */
1041	      if (!(DF_REF_FLAGS (def)
1042		     & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
1043		bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
1044	    }
1045	}
1046      def = def->next_ref;
1047    }
1048}
1049
1050/* Compute local reaching def info for basic block BB.  */
1051
1052static void
1053df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1054{
1055  struct df *df = dflow->df;
1056  basic_block bb = BASIC_BLOCK (bb_index);
1057  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1058  rtx insn;
1059
1060  bitmap_clear (seen_in_block);
1061  bitmap_clear (seen_in_insn);
1062
1063  df_rd_bb_local_compute_process_def (dflow, bb_info,
1064				      df_get_artificial_defs (df, bb_index), 0);
1065
1066  FOR_BB_INSNS_REVERSE (bb, insn)
1067    {
1068      unsigned int uid = INSN_UID (insn);
1069
1070      if (!INSN_P (insn))
1071	continue;
1072
1073      df_rd_bb_local_compute_process_def (dflow, bb_info,
1074					  DF_INSN_UID_DEFS (df, uid), 0);
1075
1076      /* This complex dance with the two bitmaps is required because
1077	 instructions can assign twice to the same pseudo.  This
1078	 generally happens with calls that will have one def for the
1079	 result and another def for the clobber.  If only one vector
1080	 is used and the clobber goes first, the result will be
1081	 lost.  */
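      /* For example, a call insn may contain both a set of its result
	 register and a clobber of that same register; both defs are
	 recorded in seen_in_insn first and only merged into
	 seen_in_block once the whole insn has been processed, so the
	 second def cannot hide the first.  */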
1082      bitmap_ior_into (seen_in_block, seen_in_insn);
1083      bitmap_clear (seen_in_insn);
1084    }
1085
1086  /* Process the artificial defs at the top of the block last since we
1087     are going backwards through the block and these are logically at
1088     the start.  */
1089  df_rd_bb_local_compute_process_def (dflow, bb_info,
1090				      df_get_artificial_defs (df, bb_index),
1091				      DF_REF_AT_TOP);
1092}
1093
1094
1095/* Compute local reaching def info for each basic block within BLOCKS.  */
1096
1097static void
1098df_rd_local_compute (struct dataflow *dflow,
1099		     bitmap all_blocks,
1100		     bitmap rescan_blocks  ATTRIBUTE_UNUSED)
1101{
1102  struct df *df = dflow->df;
1103  unsigned int bb_index;
1104  bitmap_iterator bi;
1105  unsigned int regno;
1106  struct df_rd_problem_data *problem_data
1107    = (struct df_rd_problem_data *) dflow->problem_data;
1108  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1109  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1110
1111  df_set_seen ();
1112
1113  if (!df->def_info.refs_organized)
1114    df_reorganize_refs (&df->def_info);
1115
1116  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1117    {
1118      df_rd_bb_local_compute (dflow, bb_index);
1119    }
1120
  /* Set up the knockout bit vectors to be applied across EH edges.  */
1122  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
1123    {
1124      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
1125      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
1126	{
1127	  bitmap_set_bit (sparse_invalidated, regno);
1128	}
1129      else
1130	{
1131	  bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
1132				       reg_info->begin, reg_info->n_refs);
1133	  bitmap_ior_into (dense_invalidated, defs);
1134	}
1135    }
1136  df_unset_seen ();
1137}
1138
1139
/* Initialize the solution bit vectors for the problem.  */
1141
1142static void
1143df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1144{
1145  unsigned int bb_index;
1146  bitmap_iterator bi;
1147
1148  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1149    {
1150      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1151
1152      bitmap_copy (bb_info->out, bb_info->gen);
1153      bitmap_clear (bb_info->in);
1154    }
1155}
1156
/* Confluence function: the in set of the edge's destination block
   gets the union (OR) of the out set of the edge's source block.  */
1158
1159static void
1160df_rd_confluence_n (struct dataflow *dflow, edge e)
1161{
1162  bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
1163  bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
1164
1165  if (e->flags & EDGE_EH)
1166    {
1167      struct df_rd_problem_data *problem_data
1168	= (struct df_rd_problem_data *) dflow->problem_data;
1169      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1170      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1171      struct df *df = dflow->df;
1172      bitmap_iterator bi;
1173      unsigned int regno;
1174      bitmap tmp = BITMAP_ALLOC (NULL);
1175
1176      bitmap_copy (tmp, op2);
1177      bitmap_and_compl_into (tmp, dense_invalidated);
1178
1179      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
1180 	{
1181 	  bitmap_clear_range (tmp,
1182 			      DF_REG_DEF_GET (df, regno)->begin,
1183 			      DF_REG_DEF_GET (df, regno)->n_refs);
1184	}
1185      bitmap_ior_into (op1, tmp);
1186      BITMAP_FREE (tmp);
1187    }
1188  else
1189    bitmap_ior_into (op1, op2);
1190}
1191
1192
1193/* Transfer function.  */
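/* For this forward problem the equation computed below is

       out = gen | (in & ~kill)

   where any register recorded in sparse_kill is first cleared from a
   copy of IN as a range of def ids.  */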
1194
1195static bool
1196df_rd_transfer_function (struct dataflow *dflow, int bb_index)
1197{
1198  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1199  unsigned int regno;
1200  bitmap_iterator bi;
1201  bitmap in = bb_info->in;
1202  bitmap out = bb_info->out;
1203  bitmap gen = bb_info->gen;
1204  bitmap kill = bb_info->kill;
1205  bitmap sparse_kill = bb_info->sparse_kill;
1206
1207  if (bitmap_empty_p (sparse_kill))
1208    return  bitmap_ior_and_compl (out, gen, in, kill);
1209  else
1210    {
1211      struct df *df = dflow->df;
1212      bool changed = false;
1213      bitmap tmp = BITMAP_ALLOC (NULL);
1214      bitmap_copy (tmp, in);
1215      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
1216	{
1217	  bitmap_clear_range (tmp,
1218			      DF_REG_DEF_GET (df, regno)->begin,
1219			      DF_REG_DEF_GET (df, regno)->n_refs);
1220	}
1221      bitmap_and_compl_into (tmp, kill);
1222      bitmap_ior_into (tmp, gen);
1223      changed = !bitmap_equal_p (tmp, out);
1224      if (changed)
1225	{
1226	  BITMAP_FREE (out);
1227	  bb_info->out = tmp;
1228	}
1229      else
1230	  BITMAP_FREE (tmp);
1231      return changed;
1232    }
1233}
1234
1235
1236/* Free all storage associated with the problem.  */
1237
1238static void
1239df_rd_free (struct dataflow *dflow)
1240{
1241  unsigned int i;
1242  struct df_rd_problem_data *problem_data
1243    = (struct df_rd_problem_data *) dflow->problem_data;
1244
1245  if (problem_data)
1246    {
1247      for (i = 0; i < dflow->block_info_size; i++)
1248	{
1249	  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1250	  if (bb_info)
1251	    {
1252	      BITMAP_FREE (bb_info->kill);
1253	      BITMAP_FREE (bb_info->sparse_kill);
1254	      BITMAP_FREE (bb_info->gen);
1255	      BITMAP_FREE (bb_info->in);
1256	      BITMAP_FREE (bb_info->out);
1257	    }
1258	}
1259
1260      free_alloc_pool (dflow->block_pool);
1261
1262      for (i = 0; i < problem_data->def_sites_size; i++)
1263	{
1264	  bitmap bm = problem_data->def_sites[i];
1265	  if (bm)
1266	    BITMAP_FREE (bm);
1267	}
1268
1269      free (problem_data->def_sites);
1270      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1271      BITMAP_FREE (problem_data->dense_invalidated_by_call);
1272
1273      dflow->block_info_size = 0;
1274      free (dflow->block_info);
1275      free (dflow->problem_data);
1276    }
1277  free (dflow);
1278}
1279
1280
1281/* Debugging info.  */
1282
1283static void
1284df_rd_dump (struct dataflow *dflow, FILE *file)
1285{
1286  struct df *df = dflow->df;
1287  basic_block bb;
1288  struct df_rd_problem_data *problem_data
1289    = (struct df_rd_problem_data *) dflow->problem_data;
1290  unsigned int m = max_reg_num ();
1291  unsigned int regno;
1292
1293  if (!dflow->block_info)
1294    return;
1295
1296  fprintf (file, "Reaching defs:\n\n");
1297
1298  fprintf (file, "  sparse invalidated \t");
1299  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
1300  fprintf (file, "  dense invalidated \t");
1301  dump_bitmap (file, problem_data->dense_invalidated_by_call);
1302
1303  for (regno = 0; regno < m; regno++)
1304    if (DF_REG_DEF_GET (df, regno)->n_refs)
1305      fprintf (file, "%d[%d,%d] ", regno,
1306	       DF_REG_DEF_GET (df, regno)->begin,
1307	       DF_REG_DEF_GET (df, regno)->n_refs);
1308  fprintf (file, "\n");
1309
1310  FOR_ALL_BB (bb)
1311    {
1312      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
1313      df_print_bb_index (bb, file);
1314
1315      if (!bb_info->in)
1316	continue;
1317
1318      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
1319      dump_bitmap (file, bb_info->in);
1320      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
1321      dump_bitmap (file, bb_info->gen);
1322      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
1323      dump_bitmap (file, bb_info->kill);
1324      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
1325      dump_bitmap (file, bb_info->out);
1326    }
1327}
1328
1329/* All of the information associated with every instance of the problem.  */
1330
1331static struct df_problem problem_RD =
1332{
1333  DF_RD,                      /* Problem id.  */
1334  DF_FORWARD,                 /* Direction.  */
1335  df_rd_alloc,                /* Allocate the problem specific data.  */
1336  NULL,                       /* Reset global information.  */
1337  df_rd_free_bb_info,         /* Free basic block info.  */
1338  df_rd_local_compute,        /* Local compute function.  */
1339  df_rd_init_solution,        /* Init the solution specific data.  */
1340  df_iterative_dataflow,      /* Iterative solver.  */
1341  NULL,                       /* Confluence operator 0.  */
1342  df_rd_confluence_n,         /* Confluence operator n.  */
1343  df_rd_transfer_function,    /* Transfer function.  */
1344  NULL,                       /* Finalize function.  */
1345  df_rd_free,                 /* Free all of the problem information.  */
1346  df_rd_dump,                 /* Debugging.  */
1347  NULL,                       /* Dependent problem.  */
1348  0                           /* Changeable flags.  */
1349};
1350
1351
1352
1353/* Create a new DATAFLOW instance and add it to an existing instance
1354   of DF.  The returned structure is what is used to get at the
1355   solution.  */
1356
1357struct dataflow *
1358df_rd_add_problem (struct df *df, int flags)
1359{
1360  return df_add_problem (df, &problem_RD, flags);
1361}
1362
1363
1364
1365/*----------------------------------------------------------------------------
1366   LIVE REGISTERS
1367
1368   Find the locations in the function where any use of a pseudo can
1369   reach in the backwards direction.  In and out bitvectors are built
1370   for each basic block.  The regnum is used to index into these sets.
1371   See df.h for details.
1372   ----------------------------------------------------------------------------*/
1373
1374/* Get basic block info.  */
1375
1376struct df_lr_bb_info *
1377df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1378{
1379  return (struct df_lr_bb_info *) dflow->block_info[index];
1380}
1381
1382
1383/* Set basic block info.  */
1384
1385static void
1386df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
1387		   struct df_lr_bb_info *bb_info)
1388{
1389  dflow->block_info[index] = bb_info;
1390}
1391
1392
1393/* Free basic block info.  */
1394
1395static void
1396df_lr_free_bb_info (struct dataflow *dflow,
1397		    basic_block bb ATTRIBUTE_UNUSED,
1398		    void *vbb_info)
1399{
1400  struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1401  if (bb_info)
1402    {
1403      BITMAP_FREE (bb_info->use);
1404      BITMAP_FREE (bb_info->def);
1405      BITMAP_FREE (bb_info->in);
1406      BITMAP_FREE (bb_info->out);
1407      pool_free (dflow->block_pool, bb_info);
1408    }
1409}
1410
1411
1412/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1413   not touched unless the block is new.  */
1414
1415static void
1416df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
1417	     bitmap all_blocks ATTRIBUTE_UNUSED)
1418{
1419  unsigned int bb_index;
1420  bitmap_iterator bi;
1421
1422  if (!dflow->block_pool)
1423    dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1424					   sizeof (struct df_lr_bb_info), 50);
1425
1426  df_grow_bb_info (dflow);
1427
1428  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1429    {
1430      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1431      if (bb_info)
1432	{
1433	  bitmap_clear (bb_info->def);
1434	  bitmap_clear (bb_info->use);
1435	}
1436      else
1437	{
1438	  bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1439	  df_lr_set_bb_info (dflow, bb_index, bb_info);
1440	  bb_info->use = BITMAP_ALLOC (NULL);
1441	  bb_info->def = BITMAP_ALLOC (NULL);
1442	  bb_info->in = BITMAP_ALLOC (NULL);
1443	  bb_info->out = BITMAP_ALLOC (NULL);
1444	}
1445    }
1446}
1447
1448
1449/* Compute local live register info for basic block BB.  */
1450
1451static void
1452df_lr_bb_local_compute (struct dataflow *dflow,
1453			struct df *df, unsigned int bb_index)
1454{
1455  basic_block bb = BASIC_BLOCK (bb_index);
1456  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1457  rtx insn;
1458  struct df_ref *def;
1459  struct df_ref *use;
1460
1461  /* Process the registers set in an exception handler.  */
1462  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1463    if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
1464	&& (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
1465      {
1466	unsigned int dregno = DF_REF_REGNO (def);
1467	bitmap_set_bit (bb_info->def, dregno);
1468	bitmap_clear_bit (bb_info->use, dregno);
1469      }
1470
1471  /* Process the hardware registers that are always live.  */
1472  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1473    /* Add use to set of uses in this BB.  */
1474    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
1475      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1476
1477  FOR_BB_INSNS_REVERSE (bb, insn)
1478    {
1479      unsigned int uid = INSN_UID (insn);
1480
1481      if (!INSN_P (insn))
1482	continue;
1483
1484      if (CALL_P (insn))
1485	{
1486	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
1487	    {
1488	      unsigned int dregno = DF_REF_REGNO (def);
1489
1490	      if (dregno >= FIRST_PSEUDO_REGISTER
1491		  || !(SIBLING_CALL_P (insn)
1492		       && bitmap_bit_p (df->exit_block_uses, dregno)
1493		       && !refers_to_regno_p (dregno, dregno+1,
1494					      current_function_return_rtx,
1495					      (rtx *)0)))
1496		{
1497		  /* If the def is to only part of the reg, it does
1498		     not kill the other defs that reach here.  */
1499		  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
1500		    {
1501		      bitmap_set_bit (bb_info->def, dregno);
1502		      bitmap_clear_bit (bb_info->use, dregno);
1503		    }
1504		}
1505	    }
1506	}
1507      else
1508	{
1509	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
1510	    {
1511	      unsigned int dregno = DF_REF_REGNO (def);
1512
1513	      if (DF_INSN_CONTAINS_ASM (df, insn)
1514		  && dregno < FIRST_PSEUDO_REGISTER)
1515		{
1516		  unsigned int i;
1517		  unsigned int end = dregno
1518		    + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
1519		  for (i = dregno; i <= end; ++i)
1520		    regs_asm_clobbered[i] = 1;
1521		}
	      /* If the def is to only part of the reg, it does
		 not kill the other defs that reach here.  */
1524	      if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
1525		{
1526		  bitmap_set_bit (bb_info->def, dregno);
1527		  bitmap_clear_bit (bb_info->use, dregno);
1528		}
1529	    }
1530	}
1531
1532      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
1533	/* Add use to set of uses in this BB.  */
1534	bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1535    }
1536
1537  /* Process the registers set in an exception handler.  */
1538  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1539    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
1540	&& (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
1541      {
1542	unsigned int dregno = DF_REF_REGNO (def);
1543	bitmap_set_bit (bb_info->def, dregno);
1544	bitmap_clear_bit (bb_info->use, dregno);
1545      }
1546
1547#ifdef EH_USES
1548  /* Process the uses that are live into an exception handler.  */
1549  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1550    /* Add use to set of uses in this BB.  */
1551    if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
1552      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1553#endif
1554}
1555
1556
1557/* Compute local live register info for each basic block within BLOCKS.  */
1558
1559static void
1560df_lr_local_compute (struct dataflow *dflow,
1561		     bitmap all_blocks,
1562		     bitmap rescan_blocks)
1563{
1564  struct df *df = dflow->df;
1565  unsigned int bb_index;
1566  bitmap_iterator bi;
1567
  /* If everything is being recomputed, start with a clean slate for
     the registers clobbered by asm statements.  */
  if (bitmap_equal_p (all_blocks, rescan_blocks))
    memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
1572
1573  bitmap_clear (df->hardware_regs_used);
1574
1575  /* The all-important stack pointer must always be live.  */
1576  bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
1577
1578  /* Before reload, there are a few registers that must be forced
1579     live everywhere -- which might not already be the case for
1580     blocks within infinite loops.  */
1581  if (!reload_completed)
1582    {
1583      /* Any reference to any pseudo before reload is a potential
1584	 reference of the frame pointer.  */
1585      bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
1586
1587#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1588      /* Pseudos with argument area equivalences may require
1589	 reloading via the argument pointer.  */
1590      if (fixed_regs[ARG_POINTER_REGNUM])
1591	bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
1592#endif
1593
1594      /* Any constant, or pseudo with constant equivalences, may
1595	 require reloading from memory using the pic register.  */
1596      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1597	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1598	bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
1599    }
1600
1601  if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
1602    {
1603      /* The exit block is special for this problem and its bits are
1604	 computed from thin air.  */
1605      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
1606      bitmap_copy (bb_info->use, df->exit_block_uses);
1607    }
1608
1609  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1610    {
1611      if (bb_index == EXIT_BLOCK)
1612	continue;
1613      df_lr_bb_local_compute (dflow, df, bb_index);
1614    }
1615}
1616
1617
1618/* Initialize the solution vectors.  */
1619
1620static void
1621df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1622{
1623  unsigned int bb_index;
1624  bitmap_iterator bi;
1625
1626  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1627    {
1628      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1629      bitmap_copy (bb_info->in, bb_info->use);
1630      bitmap_clear (bb_info->out);
1631    }
1632}
1633
1634
1635/* Confluence function that processes infinite loops.  This might be a
1636   noreturn function that throws.  And even if it isn't, getting the
1637   unwind info right helps debugging.  */
1638static void
1639df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1640{
1641  struct df *df = dflow->df;
1642
1643  bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1644  if (bb != EXIT_BLOCK_PTR)
1645    bitmap_copy (op1, df->hardware_regs_used);
1646}
1647
1648
1649/* Confluence function that ignores fake edges.  */
1650
1651static void
1652df_lr_confluence_n (struct dataflow *dflow, edge e)
1653{
1654  bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1655  bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1656
1657  /* Call-clobbered registers die across exception and call edges.  */
1658  /* ??? Abnormal call edges ignored for the moment, as this gets
1659     confused by sibling call edges, which crashes reg-stack.  */
1660  if (e->flags & EDGE_EH)
1661    bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1662  else
1663    bitmap_ior_into (op1, op2);
1664
1665  bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1666}
1667
1668
1669/* Transfer function.  */
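/* The usual backward liveness equation:

       in = use | (out & ~def).  */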
1670
1671static bool
1672df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1673{
1674  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1675  bitmap in = bb_info->in;
1676  bitmap out = bb_info->out;
1677  bitmap use = bb_info->use;
1678  bitmap def = bb_info->def;
1679
1680  return bitmap_ior_and_compl (in, use, out, def);
1681}
1682
1683
1684/* Free all storage associated with the problem.  */
1685
1686static void
1687df_lr_free (struct dataflow *dflow)
1688{
1689  if (dflow->block_info)
1690    {
1691      unsigned int i;
1692      for (i = 0; i < dflow->block_info_size; i++)
1693	{
1694	  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1695	  if (bb_info)
1696	    {
1697	      BITMAP_FREE (bb_info->use);
1698	      BITMAP_FREE (bb_info->def);
1699	      BITMAP_FREE (bb_info->in);
1700	      BITMAP_FREE (bb_info->out);
1701	    }
1702	}
1703      free_alloc_pool (dflow->block_pool);
1704
1705      dflow->block_info_size = 0;
1706      free (dflow->block_info);
1707    }
1708
1709  free (dflow->problem_data);
1710  free (dflow);
1711}
1712
1713
1714/* Debugging info.  */
1715
1716static void
1717df_lr_dump (struct dataflow *dflow, FILE *file)
1718{
1719  basic_block bb;
1720
1721  if (!dflow->block_info)
1722    return;
1723
1724  fprintf (file, "Live Registers:\n");
1725  FOR_ALL_BB (bb)
1726    {
1727      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1728      df_print_bb_index (bb, file);
1729
1730      if (!bb_info->in)
1731	continue;
1732
1733      fprintf (file, "  in  \t");
1734      dump_bitmap (file, bb_info->in);
1735      fprintf (file, "  use \t");
1736      dump_bitmap (file, bb_info->use);
1737      fprintf (file, "  def \t");
1738      dump_bitmap (file, bb_info->def);
1739      fprintf (file, "  out \t");
1740      dump_bitmap (file, bb_info->out);
1741    }
1742}
1743
1744/* All of the information associated with every instance of the problem.  */
1745
1746static struct df_problem problem_LR =
1747{
1748  DF_LR,                      /* Problem id.  */
1749  DF_BACKWARD,                /* Direction.  */
1750  df_lr_alloc,                /* Allocate the problem specific data.  */
1751  NULL,                       /* Reset global information.  */
1752  df_lr_free_bb_info,         /* Free basic block info.  */
1753  df_lr_local_compute,        /* Local compute function.  */
1754  df_lr_init,                 /* Init the solution specific data.  */
1755  df_iterative_dataflow,      /* Iterative solver.  */
1756  df_lr_confluence_0,         /* Confluence operator 0.  */
1757  df_lr_confluence_n,         /* Confluence operator n.  */
1758  df_lr_transfer_function,    /* Transfer function.  */
1759  NULL,                       /* Finalize function.  */
1760  df_lr_free,                 /* Free all of the problem information.  */
1761  df_lr_dump,                 /* Debugging.  */
1762  NULL,                       /* Dependent problem.  */
  0                           /* Changeable flags.  */
1764};
1765
1766
1767/* Create a new DATAFLOW instance and add it to an existing instance
1768   of DF.  The returned structure is what is used to get at the
1769   solution.  */
1770
1771struct dataflow *
1772df_lr_add_problem (struct df *df, int flags)
1773{
1774  return df_add_problem (df, &problem_LR, flags);
1775}
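

/* Hypothetical usage sketch, kept under "#if 0": assuming a DF
   instance that already contains the LR problem and has been analyzed
   elsewhere, test whether REGNO is live on entry to BB.  The helper
   name is an assumption for illustration only.  */
#if 0
static bool
df_lr_reg_live_on_entry_p (struct df *df, basic_block bb, unsigned int regno)
{
  struct dataflow *dflow = df->problems_by_index[DF_LR];
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);

  if (!bb_info || !bb_info->in)
    return false;
  return bitmap_bit_p (bb_info->in, regno) != 0;
}
#endif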
1776
1777
1778
1779/*----------------------------------------------------------------------------
1780   UNINITIALIZED REGISTERS
1781
1782   Find the set of uses for registers that are reachable from the entry
1783   block without passing thru a definition.  In and out bitvectors are built
1784   for each basic block.  The regnum is used to index into these sets.
1785   See df.h for details.
1786----------------------------------------------------------------------------*/
1787
1788/* Get basic block info.  */
1789
1790struct df_ur_bb_info *
1791df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1792{
1793  return (struct df_ur_bb_info *) dflow->block_info[index];
1794}
1795
1796
1797/* Set basic block info.  */
1798
1799static void
1800df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1801		   struct df_ur_bb_info *bb_info)
1802{
1803  dflow->block_info[index] = bb_info;
1804}
1805
1806
1807/* Free basic block info.  */
1808
1809static void
1810df_ur_free_bb_info (struct dataflow *dflow,
1811		    basic_block bb ATTRIBUTE_UNUSED,
1812		    void *vbb_info)
1813{
1814  struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1815  if (bb_info)
1816    {
1817      BITMAP_FREE (bb_info->gen);
1818      BITMAP_FREE (bb_info->kill);
1819      BITMAP_FREE (bb_info->in);
1820      BITMAP_FREE (bb_info->out);
1821      pool_free (dflow->block_pool, bb_info);
1822    }
1823}
1824
1825
1826/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1827   not touched unless the block is new.  */
1828
1829static void
1830df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
1831	     bitmap all_blocks ATTRIBUTE_UNUSED)
1832{
1833  unsigned int bb_index;
1834  bitmap_iterator bi;
1835
1836  if (!dflow->block_pool)
1837    dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1838					   sizeof (struct df_ur_bb_info), 100);
1839
1840  df_grow_bb_info (dflow);
1841
1842  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1843    {
1844      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1845      if (bb_info)
1846	{
1847	  bitmap_clear (bb_info->kill);
1848	  bitmap_clear (bb_info->gen);
1849	}
1850      else
1851	{
1852	  bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1853	  df_ur_set_bb_info (dflow, bb_index, bb_info);
1854	  bb_info->kill = BITMAP_ALLOC (NULL);
1855	  bb_info->gen = BITMAP_ALLOC (NULL);
1856	  bb_info->in = BITMAP_ALLOC (NULL);
1857	  bb_info->out = BITMAP_ALLOC (NULL);
1858	}
1859    }
1860}
1861
1862
1863/* Compute local uninitialized register info for basic block BB.  */
1864
1865static void
1866df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1867{
1868  struct df *df = dflow->df;
1869  basic_block bb = BASIC_BLOCK (bb_index);
1870  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1871  rtx insn;
1872  struct df_ref *def;
1873
1874  bitmap_clear (seen_in_block);
1875  bitmap_clear (seen_in_insn);
1876
1877  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1878    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
1879      {
1880	unsigned int regno = DF_REF_REGNO (def);
1881	if (!bitmap_bit_p (seen_in_block, regno))
1882	  {
1883	    bitmap_set_bit (seen_in_block, regno);
1884	    bitmap_set_bit (bb_info->gen, regno);
1885	  }
1886      }
1887
1888  FOR_BB_INSNS_REVERSE (bb, insn)
1889    {
1890      unsigned int uid = INSN_UID (insn);
1891      if (!INSN_P (insn))
1892	continue;
1893
1894      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
1895	{
1896	  unsigned int regno = DF_REF_REGNO (def);
1897	  /* Only the last def counts.  */
1898	  if (!bitmap_bit_p (seen_in_block, regno))
1899	    {
1900	      bitmap_set_bit (seen_in_insn, regno);
1901
1902	      if (DF_REF_FLAGS (def)
1903		  & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
1904		{
1905		  /* Only must clobbers for the entire reg destroy the
1906		     value.  */
1907		  if ((DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
		      && !(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
1909		    bitmap_set_bit (bb_info->kill, regno);
1910		}
1911	      else
1912		bitmap_set_bit (bb_info->gen, regno);
1913	    }
1914	}
1915      bitmap_ior_into (seen_in_block, seen_in_insn);
1916      bitmap_clear (seen_in_insn);
1917    }
1918
1919  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1920    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
1921      {
1922	unsigned int regno = DF_REF_REGNO (def);
1923	if (!bitmap_bit_p (seen_in_block, regno))
1924	  {
1925	    bitmap_set_bit (seen_in_block, regno);
1926	    bitmap_set_bit (bb_info->gen, regno);
1927	  }
1928      }
1929}
1930
1931
1932/* Compute local uninitialized register info.  */
1933
1934static void
1935df_ur_local_compute (struct dataflow *dflow,
1936		     bitmap all_blocks ATTRIBUTE_UNUSED,
1937		     bitmap rescan_blocks)
1938{
1939  unsigned int bb_index;
1940  bitmap_iterator bi;
1941
1942  df_set_seen ();
1943
1944  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1945    {
1946      df_ur_bb_local_compute (dflow, bb_index);
1947    }
1948
1949  df_unset_seen ();
1950}
1951
1952
1953/* Initialize the solution vectors.  */
1954
1955static void
1956df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1957{
1958  unsigned int bb_index;
1959  bitmap_iterator bi;
1960
1961  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1962    {
1963      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1964
1965      bitmap_copy (bb_info->out, bb_info->gen);
1966      bitmap_clear (bb_info->in);
1967    }
1968}
1969
1970
/* Finalize the ur solution: trim the in and out sets of every block
   against the lr solution and clean leftover hard registers out of
   the out sets (see the comments in the body).  */
1973
1974static void
1975df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1976{
1977  struct df *df = dflow->df;
1978  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1979  bitmap tmp = BITMAP_ALLOC (NULL);
1980  bitmap_iterator bi;
1981  unsigned int bb_index;
1982
1983  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1984    {
1985      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1986      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
1987
1988      /* No register may reach a location where it is not used.  Thus
	 we trim the ur result to the places where it is used.  */
1990      bitmap_and_into (bb_info->in, bb_lr_info->in);
1991      bitmap_and_into (bb_info->out, bb_lr_info->out);
1992
1993#if 1
1994      /* Hard registers may still stick in the ur_out set, but not
1995	 be in the ur_in set, if their only mention was in a call
1996	 in this block.  This is because a call kills in the lr
1997	 problem but does not kill in the ur problem.  To clean
1998	 this up, we execute the transfer function on the lr_in
1999	 set and then use that to knock bits out of ur_out.  */
2000      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2001			    bb_info->kill);
2002      bitmap_and_into (bb_info->out, tmp);
2003#endif
2004    }
2005
2006  BITMAP_FREE (tmp);
2007}
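

/* Sketch of the clean-up done above, on scratch bitmaps and kept under
   "#if 0".  A hard register mentioned only by a call ends up in ur_out
   (calls do not kill in the ur problem) but not in lr_in, so running
   the transfer function over lr_in and intersecting removes it from
   ur_out.  The register numbers and the helper name are made up.  */
#if 0
static void
df_ur_finalize_example (void)
{
  bitmap gen = BITMAP_ALLOC (NULL);
  bitmap kill = BITMAP_ALLOC (NULL);
  bitmap lr_in = BITMAP_ALLOC (NULL);
  bitmap ur_out = BITMAP_ALLOC (NULL);
  bitmap tmp = BITMAP_ALLOC (NULL);

  bitmap_set_bit (gen, 5);	/* Reg 5 is defined in the block.  */
  bitmap_set_bit (ur_out, 0);	/* Reg 0 only appears in a call ...  */
  bitmap_set_bit (ur_out, 5);
  bitmap_set_bit (lr_in, 7);	/* ... and is not live into the block.  */

  bitmap_ior_and_compl (tmp, gen, lr_in, kill);	/* tmp = {5, 7}.  */
  bitmap_and_into (ur_out, tmp);		/* ur_out is now {5}.  */

  BITMAP_FREE (gen);
  BITMAP_FREE (kill);
  BITMAP_FREE (lr_in);
  BITMAP_FREE (ur_out);
  BITMAP_FREE (tmp);
}
#endif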
2008
2009
2010/* Confluence function that ignores fake edges.  */
2011
2012static void
2013df_ur_confluence_n (struct dataflow *dflow, edge e)
2014{
2015  bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
2016  bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
2017
2018  if (e->flags & EDGE_FAKE)
2019    return;
2020
2021  bitmap_ior_into (op1, op2);
2022}
2023
2024
2025/* Transfer function.  */
2026
2027static bool
2028df_ur_transfer_function (struct dataflow *dflow, int bb_index)
2029{
2030  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
2031  bitmap in = bb_info->in;
2032  bitmap out = bb_info->out;
2033  bitmap gen = bb_info->gen;
2034  bitmap kill = bb_info->kill;
2035
2036  return bitmap_ior_and_compl (out, gen, in, kill);
2037}
2038
2039
2040/* Free all storage associated with the problem.  */
2041
2042static void
2043df_ur_free (struct dataflow *dflow)
2044{
2045  if (dflow->block_info)
2046    {
2047      unsigned int i;
2048
2049      for (i = 0; i < dflow->block_info_size; i++)
2050	{
2051	  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
2052	  if (bb_info)
2053	    {
2054	      BITMAP_FREE (bb_info->gen);
2055	      BITMAP_FREE (bb_info->kill);
2056	      BITMAP_FREE (bb_info->in);
2057	      BITMAP_FREE (bb_info->out);
2058	    }
2059	}
2060
2061      free_alloc_pool (dflow->block_pool);
2062      dflow->block_info_size = 0;
2063      free (dflow->block_info);
2064    }
2065  free (dflow);
2066}
2067
2068
2069/* Debugging info.  */
2070
2071static void
2072df_ur_dump (struct dataflow *dflow, FILE *file)
2073{
2074  basic_block bb;
2075
2076  if (!dflow->block_info)
2077    return;
2078
2079  fprintf (file, "Undefined regs:\n");
2080
2081  FOR_ALL_BB (bb)
2082    {
2083      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
2084      df_print_bb_index (bb, file);
2085
2086      if (!bb_info->in)
2087	continue;
2088
2089      fprintf (file, "  in  \t");
2090      dump_bitmap (file, bb_info->in);
2091      fprintf (file, "  gen \t");
2092      dump_bitmap (file, bb_info->gen);
2093      fprintf (file, "  kill\t");
2094      dump_bitmap (file, bb_info->kill);
2095      fprintf (file, "  out \t");
2096      dump_bitmap (file, bb_info->out);
2097    }
2098}
2099
2100/* All of the information associated with every instance of the problem.  */
2101
2102static struct df_problem problem_UR =
2103{
2104  DF_UR,                      /* Problem id.  */
2105  DF_FORWARD,                 /* Direction.  */
2106  df_ur_alloc,                /* Allocate the problem specific data.  */
2107  NULL,                       /* Reset global information.  */
2108  df_ur_free_bb_info,         /* Free basic block info.  */
2109  df_ur_local_compute,        /* Local compute function.  */
2110  df_ur_init,                 /* Init the solution specific data.  */
2111  df_iterative_dataflow,      /* Iterative solver.  */
2112  NULL,                       /* Confluence operator 0.  */
2113  df_ur_confluence_n,         /* Confluence operator n.  */
2114  df_ur_transfer_function,    /* Transfer function.  */
2115  df_ur_local_finalize,       /* Finalize function.  */
2116  df_ur_free,                 /* Free all of the problem information.  */
2117  df_ur_dump,                 /* Debugging.  */
2118  df_lr_add_problem,          /* Dependent problem.  */
2119  0                           /* Changeable flags.  */
2120};
2121
2122
2123/* Create a new DATAFLOW instance and add it to an existing instance
2124   of DF.  The returned structure is what is used to get at the
2125   solution.  */
2126
2127struct dataflow *
2128df_ur_add_problem (struct df *df, int flags)
2129{
2130  return df_add_problem (df, &problem_UR, flags);
2131}
2132
2133
2134
2135/*----------------------------------------------------------------------------
2136   UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2137
2138   Find the set of uses for registers that are reachable from the entry
2139   block without passing thru a definition.  In and out bitvectors are built
2140   for each basic block.  The regnum is used to index into these sets.
2141   See df.h for details.
2142
2143   This is a variant of the UR problem above that has a lot of special
2144   features just for the register allocation phase.  This problem
   should go away if someone would fix the interference graph.
2146
2147   ----------------------------------------------------------------------------*/
2148
2149/* Private data used to compute the solution for this problem.  These
2150   data structures are not accessible outside of this module.  */
2151struct df_urec_problem_data
2152{
2153  bool earlyclobbers_found;     /* True if any instruction contains an
2154				   earlyclobber.  */
2155#ifdef STACK_REGS
  bitmap stack_regs;		/* Registers that may be allocated to a stack register.  */
2157#endif
2158};
2159
2160
2161/* Get basic block info.  */
2162
2163struct df_urec_bb_info *
2164df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2165{
2166  return (struct df_urec_bb_info *) dflow->block_info[index];
2167}
2168
2169
2170/* Set basic block info.  */
2171
2172static void
2173df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2174		   struct df_urec_bb_info *bb_info)
2175{
2176  dflow->block_info[index] = bb_info;
2177}
2178
2179
2180/* Free basic block info.  */
2181
2182static void
2183df_urec_free_bb_info (struct dataflow *dflow,
2184		      basic_block bb ATTRIBUTE_UNUSED,
2185		      void *vbb_info)
2186{
2187  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2188  if (bb_info)
2189    {
2190      BITMAP_FREE (bb_info->gen);
2191      BITMAP_FREE (bb_info->kill);
2192      BITMAP_FREE (bb_info->in);
2193      BITMAP_FREE (bb_info->out);
2194      BITMAP_FREE (bb_info->earlyclobber);
2195      pool_free (dflow->block_pool, bb_info);
2196    }
2197}
2198
2199
2200/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2201   not touched unless the block is new.  */
2202
2203static void
2204df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
2205	       bitmap all_blocks ATTRIBUTE_UNUSED)
2206
2207{
2208  unsigned int bb_index;
2209  bitmap_iterator bi;
2210  struct df_urec_problem_data *problem_data
2211    = (struct df_urec_problem_data *) dflow->problem_data;
2212
2213  if (!dflow->block_pool)
2214    dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2215					   sizeof (struct df_urec_bb_info), 50);
2216
2217  if (!dflow->problem_data)
2218    {
2219      problem_data = XNEW (struct df_urec_problem_data);
2220      dflow->problem_data = problem_data;
2221    }
2222  problem_data->earlyclobbers_found = false;
2223
2224  df_grow_bb_info (dflow);
2225
2226  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2227    {
2228      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2229      if (bb_info)
2230	{
2231	  bitmap_clear (bb_info->kill);
2232	  bitmap_clear (bb_info->gen);
2233	  bitmap_clear (bb_info->earlyclobber);
2234	}
2235      else
2236	{
2237	  bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2238	  df_urec_set_bb_info (dflow, bb_index, bb_info);
2239	  bb_info->kill = BITMAP_ALLOC (NULL);
2240	  bb_info->gen = BITMAP_ALLOC (NULL);
2241	  bb_info->in = BITMAP_ALLOC (NULL);
2242	  bb_info->out = BITMAP_ALLOC (NULL);
2243	  bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2244	}
2245    }
2246}
2247
2248
2249/* The function modifies local info for register REG being changed in
2250   SETTER.  DATA is used to pass the current basic block info.  */
2251
2252static void
2253df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2254{
2255  int regno;
2256  int endregno;
2257  int i;
2258  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2259
2260  if (GET_CODE (reg) == SUBREG)
2261    reg = SUBREG_REG (reg);
2262
2263  if (!REG_P (reg))
2264    return;
2265
2266
2267  endregno = regno = REGNO (reg);
2268  if (regno < FIRST_PSEUDO_REGISTER)
2269    {
      endregno += hard_regno_nregs[regno][GET_MODE (reg)];
2271
2272      for (i = regno; i < endregno; i++)
2273	{
2274	  bitmap_set_bit (bb_info->kill, i);
2275
2276	  if (GET_CODE (setter) != CLOBBER)
2277	    bitmap_set_bit (bb_info->gen, i);
2278	  else
2279	    bitmap_clear_bit (bb_info->gen, i);
2280	}
2281    }
2282  else
2283    {
2284      bitmap_set_bit (bb_info->kill, regno);
2285
2286      if (GET_CODE (setter) != CLOBBER)
2287	bitmap_set_bit (bb_info->gen, regno);
2288      else
2289	bitmap_clear_bit (bb_info->gen, regno);
2290    }
2291}
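

/* Sketch of the hard register handling above, kept under "#if 0": a
   hard register used in a multi-word mode occupies
   hard_regno_nregs[regno][mode] consecutive hard registers, so local
   info must be updated for the whole range [regno, endregno).  The
   helper name is hypothetical and REG must be a hard register.  */
#if 0
static void
df_urec_mark_hard_reg_range (bitmap b, rtx reg)
{
  unsigned int regno = REGNO (reg);
  unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (reg)];
  unsigned int i;

  for (i = regno; i < endregno; i++)
    bitmap_set_bit (b, i);
}
#endif
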
2292/* Classes of registers which could be early clobbered in the current
2293   insn.  */
2294
2295static VEC(int,heap) *earlyclobber_regclass;
2296
2297/* This function finds and stores register classes that could be early
2298   clobbered in INSN.  If any earlyclobber classes are found, the function
2299   returns TRUE, in all other cases it returns FALSE.  */
2300
2301static bool
2302df_urec_check_earlyclobber (rtx insn)
2303{
2304  int opno;
2305  bool found = false;
2306
2307  extract_insn (insn);
2308
2309  VEC_truncate (int, earlyclobber_regclass, 0);
2310  for (opno = 0; opno < recog_data.n_operands; opno++)
2311    {
2312      char c;
2313      bool amp_p;
2314      int i;
2315      enum reg_class class;
2316      const char *p = recog_data.constraints[opno];
2317
2318      class = NO_REGS;
2319      amp_p = false;
2320      for (;;)
2321	{
2322	  c = *p;
2323	  switch (c)
2324	    {
2325	    case '=':  case '+':  case '?':
2326	    case '#':  case '!':
2327	    case '*':  case '%':
2328	    case 'm':  case '<':  case '>':  case 'V':  case 'o':
2329	    case 'E':  case 'F':  case 'G':  case 'H':
2330	    case 's':  case 'i':  case 'n':
2331	    case 'I':  case 'J':  case 'K':  case 'L':
2332	    case 'M':  case 'N':  case 'O':  case 'P':
2333	    case 'X':
2334	    case '0': case '1':  case '2':  case '3':  case '4':
2335	    case '5': case '6':  case '7':  case '8':  case '9':
2336	      /* These don't say anything we care about.  */
2337	      break;
2338
2339	    case '&':
2340	      amp_p = true;
2341	      break;
2342	    case '\0':
2343	    case ',':
2344	      if (amp_p && class != NO_REGS)
2345		{
2346		  int rc;
2347
2348		  found = true;
2349		  for (i = 0;
2350		       VEC_iterate (int, earlyclobber_regclass, i, rc);
2351		       i++)
2352		    {
2353		      if (rc == (int) class)
2354			goto found_rc;
2355		    }
2356
2357		  /* We use VEC_quick_push here because
2358		     earlyclobber_regclass holds no more than
2359		     N_REG_CLASSES elements. */
2360		  VEC_quick_push (int, earlyclobber_regclass, (int) class);
2361		found_rc:
2362		  ;
2363		}
2364
2365	      amp_p = false;
2366	      class = NO_REGS;
2367	      break;
2368
2369	    case 'r':
2370	      class = GENERAL_REGS;
2371	      break;
2372
2373	    default:
2374	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
2375	      break;
2376	    }
2377	  if (c == '\0')
2378	    break;
2379	  p += CONSTRAINT_LEN (c, p);
2380	}
2381    }
2382
2383  return found;
2384}
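

/* Simplified illustration of what the scan above looks for, kept under
   "#if 0" and operating on a bare constraint string instead of a
   recognized insn: an alternative is an earlyclobber candidate when it
   contains '&' together with a register class letter.  The real code
   goes through REG_CLASS_FROM_CONSTRAINT and CONSTRAINT_LEN; this
   hypothetical helper only handles the plain 'r' case, e.g. "=&r".  */
#if 0
static bool
constraint_has_earlyclobber_reg_p (const char *p)
{
  bool amp_p = false;

  for (; *p; p++)
    {
      if (*p == '&')
	amp_p = true;
      else if (*p == ',')
	amp_p = false;		/* Start of the next alternative.  */
      else if (*p == 'r' && amp_p)
	return true;
    }
  return false;
}
#endif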
2385
/* The function checks whether pseudo-register *X has a class
   intersecting with the class of a pseudo-register that could be
   early clobbered in the same insn.
2389
2390   This function is a no-op if earlyclobber_regclass is empty.
2391
   Reload can assign the same hard register to an uninitialized
   pseudo-register and an early clobbered pseudo-register in an insn
   if the pseudo-register is used for the first time in a given BB and
   is not live at the BB start.  To prevent this we do not change the
   life information for such pseudo-registers.  */
2397
2398static int
2399df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2400{
2401  enum reg_class pref_class, alt_class;
2402  int i, regno;
2403  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2404
2405  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2406    {
2407      int rc;
2408
2409      regno = REGNO (*x);
2410      if (bitmap_bit_p (bb_info->kill, regno)
2411	  || bitmap_bit_p (bb_info->gen, regno))
2412	return 0;
2413      pref_class = reg_preferred_class (regno);
2414      alt_class = reg_alternate_class (regno);
2415      for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2416	{
2417	  if (reg_classes_intersect_p (rc, pref_class)
2418	      || (rc != NO_REGS
2419		  && reg_classes_intersect_p (rc, alt_class)))
2420	    {
2421	      bitmap_set_bit (bb_info->earlyclobber, regno);
2422	      break;
2423	    }
2424	}
2425    }
2426  return 0;
2427}
2428
/* The function processes all pseudo-registers in *X with the aid of
   the previous function.  */
2431
2432static void
2433df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2434{
2435  for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2436}
2437
2438
2439/* Compute local uninitialized register info for basic block BB.  */
2440
2441static void
2442df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2443{
2444  struct df *df = dflow->df;
2445  basic_block bb = BASIC_BLOCK (bb_index);
2446  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2447  rtx insn;
2448  struct df_ref *def;
2449
2450  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2451    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2452      {
2453	unsigned int regno = DF_REF_REGNO (def);
2454	bitmap_set_bit (bb_info->gen, regno);
2455      }
2456
2457  FOR_BB_INSNS (bb, insn)
2458    {
2459      if (INSN_P (insn))
2460	{
2461	  note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
2462	  if (df_urec_check_earlyclobber (insn))
2463	    {
2464	      struct df_urec_problem_data *problem_data
2465		= (struct df_urec_problem_data *) dflow->problem_data;
2466	      problem_data->earlyclobbers_found = true;
2467	      note_uses (&PATTERN (insn),
2468			 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
2469	    }
2470	}
2471    }
2472
2473  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2474    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
2475      {
2476	unsigned int regno = DF_REF_REGNO (def);
2477	bitmap_set_bit (bb_info->gen, regno);
2478      }
2479
2480}
2481
2482
2483/* Compute local uninitialized register info.  */
2484
2485static void
2486df_urec_local_compute (struct dataflow *dflow,
2487		     bitmap all_blocks ATTRIBUTE_UNUSED,
2488		     bitmap rescan_blocks)
2489{
2490  unsigned int bb_index;
2491  bitmap_iterator bi;
2492#ifdef STACK_REGS
2493  int i;
2494  HARD_REG_SET zero, stack_hard_regs, used;
2495  struct df_urec_problem_data *problem_data
2496    = (struct df_urec_problem_data *) dflow->problem_data;
2497
2498  /* Any register that MAY be allocated to a register stack (like the
2499     387) is treated poorly.  Each such register is marked as being
2500     live everywhere.  This keeps the register allocator and the
2501     subsequent passes from doing anything useful with these values.
2502
2503     FIXME: This seems like an incredibly poor idea.  */
2504
2505  CLEAR_HARD_REG_SET (zero);
2506  CLEAR_HARD_REG_SET (stack_hard_regs);
2507  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2508    SET_HARD_REG_BIT (stack_hard_regs, i);
2509  problem_data->stack_regs = BITMAP_ALLOC (NULL);
2510  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2511    {
2512      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2513      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2514      AND_HARD_REG_SET (used, stack_hard_regs);
2515      GO_IF_HARD_REG_EQUAL (used, zero, skip);
2516      bitmap_set_bit (problem_data->stack_regs, i);
2517    skip:
2518      ;
2519    }
2520#endif
2521
2522  /* We know that earlyclobber_regclass holds no more than
2523    N_REG_CLASSES elements.  See df_urec_check_earlyclobber.  */
2524  earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2525
2526  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2527    {
2528      df_urec_bb_local_compute (dflow, bb_index);
2529    }
2530
2531  VEC_free (int, heap, earlyclobber_regclass);
2532}
2533
2534
2535/* Initialize the solution vectors.  */
2536
2537static void
2538df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2539{
2540  unsigned int bb_index;
2541  bitmap_iterator bi;
2542
2543  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2544    {
2545      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2546
2547      bitmap_copy (bb_info->out, bb_info->gen);
2548      bitmap_clear (bb_info->in);
2549    }
2550}
2551
2552
/* Or in the stack regs, hard regs and early clobber regs into the
2554   ur_in sets of all of the blocks.  */
2555
2556static void
2557df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2558{
2559  struct df *df = dflow->df;
2560  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2561  bitmap tmp = BITMAP_ALLOC (NULL);
2562  bitmap_iterator bi;
2563  unsigned int bb_index;
2564  struct df_urec_problem_data *problem_data
2565    = (struct df_urec_problem_data *) dflow->problem_data;
2566
2567  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2568    {
2569      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2570      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
2571
2572      if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2573	{
2574	  if (problem_data->earlyclobbers_found)
2575	    bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2576
2577#ifdef STACK_REGS
	  /* We cannot use the same stack register for an uninitialized
	     pseudo-register and another live pseudo-register because if
	     the uninitialized pseudo-register dies, the subsequent
	     reg-stack pass will be confused (it will believe that the
	     other register dies).  */
2583	  bitmap_ior_into (bb_info->in, problem_data->stack_regs);
2584	  bitmap_ior_into (bb_info->out, problem_data->stack_regs);
2585#endif
2586	}
2587
2588      /* No register may reach a location where it is not used.  Thus
	 we trim the urec result to the places where it is used.  */
2590      bitmap_and_into (bb_info->in, bb_lr_info->in);
2591      bitmap_and_into (bb_info->out, bb_lr_info->out);
2592
2593#if 1
2594      /* Hard registers may still stick in the ur_out set, but not
2595	 be in the ur_in set, if their only mention was in a call
2596	 in this block.  This is because a call kills in the lr
	 problem but does not kill in the urec problem.  To clean
2598	 this up, we execute the transfer function on the lr_in
2599	 set and then use that to knock bits out of ur_out.  */
2600      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2601			    bb_info->kill);
2602      bitmap_and_into (bb_info->out, tmp);
2603#endif
2604    }
2605
2606#ifdef STACK_REGS
2607  BITMAP_FREE (problem_data->stack_regs);
2608#endif
2609  BITMAP_FREE (tmp);
2610}
2611
2612
2613/* Confluence function that ignores fake edges.  */
2614
2615static void
2616df_urec_confluence_n (struct dataflow *dflow, edge e)
2617{
2618  bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2619  bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2620
2621  if (e->flags & EDGE_FAKE)
2622    return;
2623
2624  bitmap_ior_into (op1, op2);
2625}
2626
2627
2628/* Transfer function.  */
2629
2630static bool
2631df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2632{
2633  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2634  bitmap in = bb_info->in;
2635  bitmap out = bb_info->out;
2636  bitmap gen = bb_info->gen;
2637  bitmap kill = bb_info->kill;
2638
2639  return bitmap_ior_and_compl (out, gen, in, kill);
2640}
2641
2642
2643/* Free all storage associated with the problem.  */
2644
2645static void
2646df_urec_free (struct dataflow *dflow)
2647{
2648  if (dflow->block_info)
2649    {
2650      unsigned int i;
2651
2652      for (i = 0; i < dflow->block_info_size; i++)
2653	{
2654	  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2655	  if (bb_info)
2656	    {
2657	      BITMAP_FREE (bb_info->gen);
2658	      BITMAP_FREE (bb_info->kill);
2659	      BITMAP_FREE (bb_info->in);
2660	      BITMAP_FREE (bb_info->out);
2661	      BITMAP_FREE (bb_info->earlyclobber);
2662	    }
2663	}
2664
2665      free_alloc_pool (dflow->block_pool);
2666
2667      dflow->block_info_size = 0;
2668      free (dflow->block_info);
2669      free (dflow->problem_data);
2670    }
2671  free (dflow);
2672}
2673
2674
2675/* Debugging info.  */
2676
2677static void
2678df_urec_dump (struct dataflow *dflow, FILE *file)
2679{
2680  basic_block bb;
2681
2682  if (!dflow->block_info)
2683    return;
2684
2685  fprintf (file, "Undefined regs:\n");
2686
2687  FOR_ALL_BB (bb)
2688    {
2689      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2690      df_print_bb_index (bb, file);
2691
2692      if (!bb_info->in)
2693	continue;
2694
2695      fprintf (file, "  in  \t");
2696      dump_bitmap (file, bb_info->in);
2697      fprintf (file, "  gen \t");
2698      dump_bitmap (file, bb_info->gen);
2699      fprintf (file, "  kill\t");
2700      dump_bitmap (file, bb_info->kill);
2701      fprintf (file, "  ec\t");
2702      dump_bitmap (file, bb_info->earlyclobber);
2703      fprintf (file, "  out \t");
2704      dump_bitmap (file, bb_info->out);
2705    }
2706}
2707
2708/* All of the information associated with every instance of the problem.  */
2709
2710static struct df_problem problem_UREC =
2711{
2712  DF_UREC,                    /* Problem id.  */
2713  DF_FORWARD,                 /* Direction.  */
2714  df_urec_alloc,              /* Allocate the problem specific data.  */
2715  NULL,                       /* Reset global information.  */
2716  df_urec_free_bb_info,       /* Free basic block info.  */
2717  df_urec_local_compute,      /* Local compute function.  */
2718  df_urec_init,               /* Init the solution specific data.  */
2719  df_iterative_dataflow,      /* Iterative solver.  */
2720  NULL,                       /* Confluence operator 0.  */
2721  df_urec_confluence_n,       /* Confluence operator n.  */
2722  df_urec_transfer_function,  /* Transfer function.  */
2723  df_urec_local_finalize,     /* Finalize function.  */
2724  df_urec_free,               /* Free all of the problem information.  */
2725  df_urec_dump,               /* Debugging.  */
2726  df_lr_add_problem,          /* Dependent problem.  */
2727  0                           /* Changeable flags.  */
2728};
2729
2730
2731/* Create a new DATAFLOW instance and add it to an existing instance
2732   of DF.  The returned structure is what is used to get at the
2733   solution.  */
2734
2735struct dataflow *
2736df_urec_add_problem (struct df *df, int flags)
2737{
2738  return df_add_problem (df, &problem_UREC, flags);
2739}
2740
2741
2742
2743/*----------------------------------------------------------------------------
2744   CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2745
2746   Link either the defs to the uses and / or the uses to the defs.
2747
2748   These problems are set up like the other dataflow problems so that
2749   they nicely fit into the framework.  They are much simpler and only
2750   involve a single traversal of instructions and an examination of
2751   the reaching defs information (the dependent problem).
2752----------------------------------------------------------------------------*/
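
/* Sketch of consuming the result, kept under "#if 0": once DF_DU_CHAIN
   has been computed, every def carries a NULL-terminated list of
   df_link nodes, one per use it reaches.  This hypothetical helper
   just counts them.  */
#if 0
static int
df_count_uses_of_def (struct df_ref *def)
{
  struct df_link *link;
  int n = 0;

  for (link = DF_REF_CHAIN (def); link; link = link->next)
    n++;
  return n;
}
#endif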
2753
2754/* Create def-use or use-def chains.  */
2755
2756static void
2757df_chain_alloc (struct dataflow *dflow,
2758		bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
2759		bitmap all_blocks ATTRIBUTE_UNUSED)
2760
2761{
2762  struct df *df = dflow->df;
2763  unsigned int i;
2764
2765  /* Wholesale destruction of the old chains.  */
2766  if (dflow->block_pool)
2767    free_alloc_pool (dflow->block_pool);
2768
2769  dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2770					 sizeof (struct df_link), 100);
2771
2772  if (dflow->flags & DF_DU_CHAIN)
2773    {
2774      if (!df->def_info.refs_organized)
2775	df_reorganize_refs (&df->def_info);
2776
2777      /* Clear out the pointers from the refs.  */
2778      for (i = 0; i < DF_DEFS_SIZE (df); i++)
2779	{
2780	  struct df_ref *ref = df->def_info.refs[i];
2781	  DF_REF_CHAIN (ref) = NULL;
2782	}
2783    }
2784
2785  if (dflow->flags & DF_UD_CHAIN)
2786    {
2787      if (!df->use_info.refs_organized)
2788	df_reorganize_refs (&df->use_info);
2789      for (i = 0; i < DF_USES_SIZE (df); i++)
2790	{
2791	  struct df_ref *ref = df->use_info.refs[i];
2792	  DF_REF_CHAIN (ref) = NULL;
2793	}
2794    }
2795}
2796
2797
2798/* Reset all def_use and use_def chains in INSN.  */
2799
2800static void
2801df_chain_insn_reset (struct dataflow *dflow, rtx insn)
2802{
2803  struct df *df = dflow->df;
2804  unsigned int uid = INSN_UID (insn);
2805  struct df_insn_info *insn_info = NULL;
2806  struct df_ref *ref;
2807
2808  if (uid < df->insns_size)
2809    insn_info = DF_INSN_UID_GET (df, uid);
2810
2811  if (insn_info)
2812    {
2813      if (dflow->flags & DF_DU_CHAIN)
2814	{
2815	  ref = insn_info->defs;
2816	  while (ref)
2817	    {
2818	      ref->chain = NULL;
2819	      ref = ref->next_ref;
2820	    }
2821	}
2822
2823      if (dflow->flags & DF_UD_CHAIN)
2824	{
2825	  ref = insn_info->uses;
2826	  while (ref)
2827	    {
2828	      ref->chain = NULL;
2829	      ref = ref->next_ref;
2830	    }
2831	}
2832    }
2833}
2834
2835
2836/* Reset all def_use and use_def chains in basic block.  */
2837
2838static void
2839df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
2840{
2841  struct df *df = dflow->df;
2842  rtx insn;
2843  basic_block bb = BASIC_BLOCK (bb_index);
2844
  /* Someone deleted the basic block out from under us.  */
2846  if (!bb)
2847    return;
2848
2849  FOR_BB_INSNS (bb, insn)
2850    {
2851      if (INSN_P (insn))
2852	{
2853	  /* Record defs within INSN.  */
2854	  df_chain_insn_reset (dflow, insn);
2855	}
2856    }
2857
2858  /* Get rid of any chains in artificial uses or defs.  */
2859  if (dflow->flags & DF_DU_CHAIN)
2860    {
2861      struct df_ref *def;
2862      def = df_get_artificial_defs (df, bb_index);
2863      while (def)
2864	{
2865	  def->chain = NULL;
2866	  def = def->next_ref;
2867	}
2868    }
2869
2870  if (dflow->flags & DF_UD_CHAIN)
2871    {
2872      struct df_ref *use;
2873      use = df_get_artificial_uses (df, bb_index);
2874      while (use)
2875	{
2876	  use->chain = NULL;
2877	  use = use->next_ref;
2878	}
2879    }
2880}
2881
2882
2883/* Reset all of the chains when the set of basic blocks changes.  */
2884
2885
2886static void
2887df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
2888{
2889  bitmap_iterator bi;
2890  unsigned int bb_index;
2891
2892  EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
2893    {
2894      df_chain_bb_reset (dflow, bb_index);
2895    }
2896
2897  free_alloc_pool (dflow->block_pool);
2898  dflow->block_pool = NULL;
2899}
2900
2901
2902/* Create the chains for a list of USEs.  */
2903
2904static void
2905df_chain_create_bb_process_use (struct dataflow *dflow,
2906				bitmap local_rd,
2907				struct df_ref *use,
2908				enum df_ref_flags top_flag)
2909{
2910  struct df *df = dflow->df;
2911  bitmap_iterator bi;
2912  unsigned int def_index;
2913
2914  while (use)
2915    {
2916      /* Do not want to go through this for an uninitialized var.  */
2917      unsigned int uregno = DF_REF_REGNO (use);
2918      int count = DF_REG_DEF_GET (df, uregno)->n_refs;
2919      if (count)
2920	{
2921	  if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2922	    {
2923	      unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2924	      unsigned int last_index = first_index + count - 1;
2925
2926	      EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2927		{
2928		  struct df_ref *def;
2929		  if (def_index > last_index)
2930		    break;
2931
2932		  def = DF_DEFS_GET (df, def_index);
2933		  if (dflow->flags & DF_DU_CHAIN)
2934		    df_chain_create (dflow, def, use);
2935		  if (dflow->flags & DF_UD_CHAIN)
2936		    df_chain_create (dflow, use, def);
2937		}
2938	    }
2939	}
2940      use = use->next_ref;
2941    }
2942}
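

/* Sketch of the encoding relied on above, kept under "#if 0": the
   reaching defs problem numbers all defs of a given register
   contiguously, so the defs of UREGNO present in a local reaching defs
   bitmap are exactly the set bits in the id range
   [begin, begin + n_refs).  The helper name is hypothetical.  */
#if 0
static int
df_count_reaching_defs_of_reg (struct df *df, bitmap local_rd,
			       unsigned int uregno)
{
  unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
  int count = DF_REG_DEF_GET (df, uregno)->n_refs;
  unsigned int last_index;
  unsigned int def_index;
  bitmap_iterator bi;
  int n = 0;

  if (count == 0)
    return 0;

  last_index = first_index + count - 1;
  EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
    {
      if (def_index > last_index)
	break;
      n++;
    }
  return n;
}
#endif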
2943
/* Reset the storage pool that the def-use or use-def chains have been
   allocated in.  We do not need to readjust the pointers in the refs;
   these have already been cleaned out.  */
2947
2948/* Create chains from reaching defs bitmaps for basic block BB.  */
2949static void
2950df_chain_create_bb (struct dataflow *dflow,
2951		    struct dataflow *rd_dflow,
2952		    unsigned int bb_index)
2953{
2954  basic_block bb = BASIC_BLOCK (bb_index);
2955  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2956  rtx insn;
2957  bitmap cpy = BITMAP_ALLOC (NULL);
2958  struct df *df = dflow->df;
2959  struct df_ref *def;
2960
2961  bitmap_copy (cpy, bb_info->in);
2962
  /* Since we are going forwards, process the artificial uses first,
     then the artificial defs.  */
2965
2966#ifdef EH_USES
2967  /* Create the chains for the artificial uses from the EH_USES at the
2968     beginning of the block.  */
2969  df_chain_create_bb_process_use (dflow, cpy,
2970				  df_get_artificial_uses (df, bb->index),
2971				  DF_REF_AT_TOP);
2972#endif
2973
2974  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2975    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2976      {
2977	unsigned int dregno = DF_REF_REGNO (def);
2978	if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
2979	  bitmap_clear_range (cpy,
2980			      DF_REG_DEF_GET (df, dregno)->begin,
2981			      DF_REG_DEF_GET (df, dregno)->n_refs);
2982	bitmap_set_bit (cpy, DF_REF_ID (def));
2983      }
2984
2985  /* Process the regular instructions next.  */
2986  FOR_BB_INSNS (bb, insn)
2987    {
2988      struct df_ref *def;
2989      unsigned int uid = INSN_UID (insn);
2990
2991      if (!INSN_P (insn))
2992	continue;
2993
2994      /* Now scan the uses and link them up with the defs that remain
2995	 in the cpy vector.  */
2996
2997      df_chain_create_bb_process_use (dflow, cpy,
2998				     DF_INSN_UID_USES (df, uid), 0);
2999
3000      /* Since we are going forwards, process the defs second.  This
3001         pass only changes the bits in cpy.  */
3002      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
3003	{
3004	  unsigned int dregno = DF_REF_REGNO (def);
3005	  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3006	    bitmap_clear_range (cpy,
3007				DF_REG_DEF_GET (df, dregno)->begin,
3008				DF_REG_DEF_GET (df, dregno)->n_refs);
3009	  if (!(DF_REF_FLAGS (def)
3010		 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3011	    bitmap_set_bit (cpy, DF_REF_ID (def));
3012	}
3013    }
3014
3015  /* Create the chains for the artificial uses of the hard registers
3016     at the end of the block.  */
3017  df_chain_create_bb_process_use (dflow, cpy,
3018				  df_get_artificial_uses (df, bb->index), 0);
3019}
3020
/* Create def-use and use-def chains from the reaching defs bitmaps
   for the basic blocks in ALL_BLOCKS.  */
3023
3024static void
3025df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
3026{
3027  unsigned int bb_index;
3028  bitmap_iterator bi;
3029  struct df *df = dflow->df;
3030  struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
3031
3032  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3033    {
3034      df_chain_create_bb (dflow, rd_dflow, bb_index);
3035    }
3036}
3037
3038
3039/* Free all storage associated with the problem.  */
3040
3041static void
3042df_chain_free (struct dataflow *dflow)
3043{
3044  free_alloc_pool (dflow->block_pool);
3045  free (dflow);
3046}
3047
3048
3049/* Debugging info.  */
3050
3051static void
3052df_chains_dump (struct dataflow *dflow, FILE *file)
3053{
3054  struct df *df = dflow->df;
3055  unsigned int j;
3056
3057  if (dflow->flags & DF_DU_CHAIN)
3058    {
3059      fprintf (file, "Def-use chains:\n");
3060      for (j = 0; j < df->def_info.bitmap_size; j++)
3061	{
3062	  struct df_ref *def = DF_DEFS_GET (df, j);
3063	  if (def)
3064	    {
3065	      fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3066		       j, DF_REF_BBNO (def),
3067		       DF_REF_INSN (def) ?
3068		       DF_INSN_LUID (df, DF_REF_INSN (def)):
3069		       -1,
3070		       DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
3071		       DF_REF_REGNO (def));
3072	      if (def->flags & DF_REF_READ_WRITE)
3073		fprintf (file, "read/write ");
3074	      df_chain_dump (DF_REF_CHAIN (def), file);
3075	      fprintf (file, "\n");
3076	    }
3077	}
3078    }
3079
3080  if (dflow->flags & DF_UD_CHAIN)
3081    {
3082      fprintf (file, "Use-def chains:\n");
3083      for (j = 0; j < df->use_info.bitmap_size; j++)
3084	{
3085	  struct df_ref *use = DF_USES_GET (df, j);
3086	  if (use)
3087	    {
3088	      fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3089		       j, DF_REF_BBNO (use),
3090		       DF_REF_INSN (use) ?
3091		       DF_INSN_LUID (df, DF_REF_INSN (use))
3092		       : -1,
		       DF_REF_INSN (use) ?
		       DF_REF_INSN_UID (use)
3095		       : -1,
3096		       DF_REF_REGNO (use));
3097	      if (use->flags & DF_REF_READ_WRITE)
3098		fprintf (file, "read/write ");
3099	      if (use->flags & DF_REF_STRIPPED)
3100		fprintf (file, "stripped ");
3101	      if (use->flags & DF_REF_IN_NOTE)
3102		fprintf (file, "note ");
3103	      df_chain_dump (DF_REF_CHAIN (use), file);
3104	      fprintf (file, "\n");
3105	    }
3106	}
3107    }
3108}
3109
3110
3111static struct df_problem problem_CHAIN =
3112{
3113  DF_CHAIN,                   /* Problem id.  */
3114  DF_NONE,                    /* Direction.  */
3115  df_chain_alloc,             /* Allocate the problem specific data.  */
3116  df_chain_reset,             /* Reset global information.  */
3117  NULL,                       /* Free basic block info.  */
3118  NULL,                       /* Local compute function.  */
3119  NULL,                       /* Init the solution specific data.  */
3120  NULL,                       /* Iterative solver.  */
3121  NULL,                       /* Confluence operator 0.  */
3122  NULL,                       /* Confluence operator n.  */
3123  NULL,                       /* Transfer function.  */
3124  df_chain_finalize,          /* Finalize function.  */
3125  df_chain_free,              /* Free all of the problem information.  */
3126  df_chains_dump,             /* Debugging.  */
3127  df_rd_add_problem,          /* Dependent problem.  */
3128  0                           /* Changeable flags.  */
3129};
3130
3131
3132/* Create a new DATAFLOW instance and add it to an existing instance
3133   of DF.  The returned structure is what is used to get at the
3134   solution.  */
3135
3136struct dataflow *
3137df_chain_add_problem (struct df *df, int flags)
3138{
3139  return df_add_problem (df, &problem_CHAIN, flags);
3140}
3141
3142
3143/*----------------------------------------------------------------------------
3144   REGISTER INFORMATION
3145
3146   This pass properly computes REG_DEAD and REG_UNUSED notes.
3147
3148   If the DF_RI_LIFE flag is set the following vectors containing
3149   information about register usage are properly set: REG_N_REFS,
3150   REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH, REG_N_CALLS_CROSSED,
3151   REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
3152
3153   ----------------------------------------------------------------------------*/
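
/* Hypothetical consumer sketch, kept under "#if 0": once the RI
   problem has run with DF_RI_LIFE, the regs.h vectors filled in below
   can be read directly.  For example, a pseudo that never crosses a
   call and is confined to a single basic block has a zero
   REG_N_CALLS_CROSSED and a non-negative REG_BASIC_BLOCK.  */
#if 0
static bool
df_ri_pseudo_local_and_call_free_p (unsigned int regno)
{
  return (REG_N_CALLS_CROSSED (regno) == 0
	  && REG_BASIC_BLOCK (regno) >= 0);
}
#endif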
3154
3155#ifdef REG_DEAD_DEBUGGING
3156static void
print_note (const char *prefix, rtx insn, rtx note)
3158{
3159  fprintf (stderr, "%s %d ", prefix, INSN_UID (insn));
3160  print_rtl (stderr, note);
3161  fprintf (stderr, "\n");
3162}
3163#endif
3164
3165/* Allocate the lifetime information.  */
3166
3167static void
3168df_ri_alloc (struct dataflow *dflow,
3169	     bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
3170	     bitmap all_blocks ATTRIBUTE_UNUSED)
3171{
3172  int i;
3173  struct df *df = dflow->df;
3174
3175  if (dflow->flags & DF_RI_LIFE)
3176    {
3177      max_regno = max_reg_num ();
3178      allocate_reg_info (max_regno, FALSE, FALSE);
3179
3180      /* Reset all the data we'll collect.  */
3181      for (i = 0; i < max_regno; i++)
3182	{
3183	  REG_N_SETS (i) = DF_REG_DEF_COUNT (df, i);
3184	  REG_N_REFS (i) = DF_REG_USE_COUNT (df, i) + REG_N_SETS (i);
3185	  REG_N_DEATHS (i) = 0;
3186	  REG_N_CALLS_CROSSED (i) = 0;
3187	  REG_N_THROWING_CALLS_CROSSED (i) = 0;
3188	  REG_LIVE_LENGTH (i) = 0;
3189	  REG_FREQ (i) = 0;
3190	  REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3191	}
3192    }
3193}
3194
3195
3196/* After reg-stack, the x86 floating point stack regs are difficult to
3197   analyze because of all of the pushes, pops and rotations.  Thus, we
3198   just leave the notes alone. */
3199
3200static inline bool
3201df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
3202{
3203#ifdef STACK_REGS
3204  return (regstack_completed
3205	  && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG));
3206#else
3207  return false;
3208#endif
3209}
3210
3211
3212/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN.  */
3213
3214static void
3215df_kill_notes (rtx insn, int flags)
3216{
3217  rtx *pprev = &REG_NOTES (insn);
3218  rtx link = *pprev;
3219
3220  while (link)
3221    {
3222      switch (REG_NOTE_KIND (link))
3223	{
3224	case REG_DEAD:
3225	  if (flags & DF_RI_LIFE)
3226	    if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
3227	      REG_N_DEATHS (REGNO (XEXP (link, 0)))++;
3228
3229	  /* Fallthru */
	case REG_UNUSED:
	  if (!df_ignore_stack_reg (REGNO (XEXP (link, 0))))
	    {
	      rtx next = XEXP (link, 1);
#ifdef REG_DEAD_DEBUGGING
	      print_note ("deleting: ", insn, link);
#endif
	      free_EXPR_LIST_node (link);
	      *pprev = link = next;
	    }
	  else
	    {
	      /* Keep the note for an ignored stack reg and advance
		 past it; otherwise the loop would never terminate.  */
	      pprev = &XEXP (link, 1);
	      link = *pprev;
	    }
	  break;
3241
3242	default:
3243	  pprev = &XEXP (link, 1);
3244	  link = *pprev;
3245	  break;
3246	}
3247    }
3248}
3249
3250
3251/* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
3252   based on the bits in LIVE.  Do not generate notes for registers in
3253   artificial uses.  DO_NOT_GEN is updated so that REG_DEAD notes are
3254   not generated if the reg is both read and written by the
3255   instruction.
3256*/
3257
3258static void
3259df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3260			    bitmap live, bitmap do_not_gen,
3261			    bitmap artificial_uses, int flags)
3262{
3263  bool all_dead = true;
3264  struct df_link *regs = mws->regs;
3265  unsigned int regno = DF_REF_REGNO (regs->ref);
3266
3267#ifdef REG_DEAD_DEBUGGING
3268  fprintf (stderr, "mw unused looking at %d\n", DF_REF_REGNO (regs->ref));
3269  df_ref_debug (regs->ref, stderr);
3270#endif
3271  while (regs)
3272    {
3273      unsigned int regno = DF_REF_REGNO (regs->ref);
3274      if ((bitmap_bit_p (live, regno))
3275	  || bitmap_bit_p (artificial_uses, regno))
3276	{
3277	  all_dead = false;
3278	  break;
3279	}
3280      regs = regs->next;
3281    }
3282
3283  if (all_dead)
3284    {
3285      struct df_link *regs = mws->regs;
3286      rtx note = alloc_EXPR_LIST (REG_UNUSED, *DF_REF_LOC (regs->ref),
3287				  REG_NOTES (insn));
3288      REG_NOTES (insn) = note;
3289#ifdef REG_DEAD_DEBUGGING
3290      print_note ("adding 1: ", insn, note);
3291#endif
3292      bitmap_set_bit (do_not_gen, regno);
3293      /* Only do this if the value is totally dead.  */
3294      if (flags & DF_RI_LIFE)
3295	{
3296	  REG_N_DEATHS (regno) ++;
3297	  REG_LIVE_LENGTH (regno)++;
3298	}
3299    }
3300  else
3301    {
3302      struct df_link *regs = mws->regs;
3303      while (regs)
3304	{
3305	  struct df_ref *ref = regs->ref;
3306
3307	  regno = DF_REF_REGNO (ref);
3308	  if ((!bitmap_bit_p (live, regno))
3309	      && (!bitmap_bit_p (artificial_uses, regno)))
3310	    {
3311	      rtx note = alloc_EXPR_LIST (REG_UNUSED, regno_reg_rtx[regno],
3312					  REG_NOTES (insn));
3313	      REG_NOTES (insn) = note;
3314#ifdef REG_DEAD_DEBUGGING
3315	      print_note ("adding 2: ", insn, note);
3316#endif
3317	    }
3318	  bitmap_set_bit (do_not_gen, regno);
3319	  regs = regs->next;
3320	}
3321    }
3322}
3323
3324
3325/* Set the REG_DEAD notes for the multiword hardreg use in INSN based
3326   on the bits in LIVE.  DO_NOT_GEN is used to keep REG_DEAD notes
3327   from being set if the instruction both reads and writes the
3328   register.  */
3329
3330static void
3331df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3332			  bitmap live, bitmap do_not_gen,
3333			  bitmap artificial_uses, int flags)
3334{
3335  bool all_dead = true;
3336  struct df_link *regs = mws->regs;
3337  unsigned int regno = DF_REF_REGNO (regs->ref);
3338
3339#ifdef REG_DEAD_DEBUGGING
3340  fprintf (stderr, "mw looking at %d\n", DF_REF_REGNO (regs->ref));
3341  df_ref_debug (regs->ref, stderr);
3342#endif
3343  while (regs)
3344    {
3345      unsigned int regno = DF_REF_REGNO (regs->ref);
3346      if ((bitmap_bit_p (live, regno))
3347	  || bitmap_bit_p (artificial_uses, regno))
3348	{
3349	  all_dead = false;
3350	  break;
3351	}
3352      regs = regs->next;
3353    }
3354
3355  if (all_dead)
3356    {
3357      if (!bitmap_bit_p (do_not_gen, regno))
3358	{
3359	  /* Add a dead note for the entire multi word register.  */
3360	  struct df_link *regs = mws->regs;
3361	  rtx note = alloc_EXPR_LIST (REG_DEAD, *DF_REF_LOC (regs->ref),
3362				      REG_NOTES (insn));
3363	  REG_NOTES (insn) = note;
3364#ifdef REG_DEAD_DEBUGGING
3365	  print_note ("adding 1: ", insn, note);
3366#endif
3367
3368	  if (flags & DF_RI_LIFE)
3369	    {
3370	      struct df_link *regs = mws->regs;
3371	      while (regs)
3372		{
3373		  struct df_ref *ref = regs->ref;
3374		  regno = DF_REF_REGNO (ref);
3375		  REG_N_DEATHS (regno)++;
3376		  regs = regs->next;
3377		}
3378	    }
3379	}
3380    }
3381  else
3382    {
3383      struct df_link *regs = mws->regs;
3384      while (regs)
3385	{
3386	  struct df_ref *ref = regs->ref;
3387
3388	  regno = DF_REF_REGNO (ref);
3389	  if ((!bitmap_bit_p (live, regno))
3390	      && (!bitmap_bit_p (artificial_uses, regno))
3391	      && (!bitmap_bit_p (do_not_gen, regno)))
3392	    {
3393	      rtx note = alloc_EXPR_LIST (REG_DEAD, regno_reg_rtx[regno],
3394					  REG_NOTES (insn));
3395	      REG_NOTES (insn) = note;
3396	      if (flags & DF_RI_LIFE)
3397		REG_N_DEATHS (regno)++;
3398#ifdef REG_DEAD_DEBUGGING
3399	      print_note ("adding 2: ", insn, note);
3400#endif
3401	    }
3402
3403	  regs = regs->next;
3404	}
3405    }
3406}
3407
3408
3409/* Create a REG_UNUSED note if necessary for DEF in INSN updating LIVE
3410   and DO_NOT_GEN.  Do not generate notes for registers in artificial
3411   uses.  */
3412
3413static void
3414df_create_unused_note (basic_block bb, rtx insn, struct df_ref *def,
3415		       bitmap live, bitmap do_not_gen, bitmap artificial_uses,
3416		       bitmap local_live, bitmap local_processed,
3417		       int flags, int luid)
3418{
3419  unsigned int dregno = DF_REF_REGNO (def);
3420
3421#ifdef REG_DEAD_DEBUGGING
3422  fprintf (stderr, "  regular looking at def ");
3423  df_ref_debug (def, stderr);
3424#endif
3425
3426  if (bitmap_bit_p (live, dregno))
3427    {
3428      if (flags & DF_RI_LIFE)
3429	{
3430	  /* If we have seen this regno, then it has already been
3431	     processed correctly with the per insn increment.  If we
3432	     have not seen it we need to add the length from here to
3433	     the end of the block to the live length.  */
3434	  if (bitmap_bit_p (local_processed, dregno))
3435	    {
3436	      if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3437		bitmap_clear_bit (local_live, dregno);
3438	    }
3439	  else
3440	    {
3441	      bitmap_set_bit (local_processed, dregno);
3442	      REG_LIVE_LENGTH (dregno) += luid;
3443	    }
3444	}
3445    }
3446  else if ((!(DF_REF_FLAGS (def) & DF_REF_MW_HARDREG))
3447	    && (!bitmap_bit_p (artificial_uses, dregno))
3448	    && (!df_ignore_stack_reg (dregno)))
3449    {
3450      rtx reg = GET_CODE (*DF_REF_LOC (def)) == SUBREG ?
3451	SUBREG_REG (*DF_REF_LOC (def)) : *DF_REF_LOC (def);
3452      rtx note = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3453      REG_NOTES (insn) = note;
3454#ifdef REG_DEAD_DEBUGGING
3455      print_note ("adding 3: ", insn, note);
3456#endif
3457      if (flags & DF_RI_LIFE)
3458	{
3459	  REG_N_DEATHS (dregno) ++;
3460	  REG_LIVE_LENGTH (dregno)++;
3461	}
3462    }
3463
3464  if ((flags & DF_RI_LIFE) && (dregno >= FIRST_PSEUDO_REGISTER))
3465    {
3466      REG_FREQ (dregno) += REG_FREQ_FROM_BB (bb);
3467      if (REG_BASIC_BLOCK (dregno) == REG_BLOCK_UNKNOWN)
3468	REG_BASIC_BLOCK (dregno) = bb->index;
3469      else if (REG_BASIC_BLOCK (dregno) != bb->index)
3470	REG_BASIC_BLOCK (dregno) = REG_BLOCK_GLOBAL;
3471    }
3472
  if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3474    bitmap_set_bit (do_not_gen, dregno);
3475
3476  /* Kill this register if it is not a subreg store.  */
3477  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3478    bitmap_clear_bit (live, dregno);
3479}
3480
3481
3482/* Recompute the REG_DEAD and REG_UNUSED notes and compute register
3483   info: lifetime, bb, and number of defs and uses for basic block
   BB.  The bitmaps passed in are scratch bitmaps used here.  */
3485
3486static void
3487df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index,
3488		  bitmap live, bitmap do_not_gen, bitmap artificial_uses,
3489		  bitmap local_live, bitmap local_processed, bitmap setjumps_crossed)
3490{
3491  struct df *df = dflow->df;
3492  basic_block bb = BASIC_BLOCK (bb_index);
3493  rtx insn;
3494  struct df_ref *def;
3495  struct df_ref *use;
3496  int luid = 0;
3497
3498  bitmap_copy (live, df_get_live_out (df, bb));
3499  bitmap_clear (artificial_uses);
3500
3501  if (dflow->flags & DF_RI_LIFE)
3502    {
3503      /* Process the regs live at the end of the block.  Mark them as
3504	 not local to any one basic block.  */
3505      bitmap_iterator bi;
3506      unsigned int regno;
3507      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3508	REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3509    }
3510
3511  /* Process the artificial defs and uses at the bottom of the block
3512     to begin processing.  */
3513  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
3514    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3515      bitmap_clear_bit (live, DF_REF_REGNO (def));
3516
3517  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
3518    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3519      {
3520	unsigned int regno = DF_REF_REGNO (use);
3521	bitmap_set_bit (live, regno);
3522
3523	/* Notes are not generated for any of the artificial registers
3524	   at the bottom of the block.  */
3525	bitmap_set_bit (artificial_uses, regno);
3526      }
3527
3528  FOR_BB_INSNS_REVERSE (bb, insn)
3529    {
3530      unsigned int uid = INSN_UID (insn);
3531      unsigned int regno;
3532      bitmap_iterator bi;
3533      struct df_mw_hardreg *mws;
3534
3535      if (!INSN_P (insn))
3536	continue;
3537
3538      if (dflow->flags & DF_RI_LIFE)
3539	{
3540	  /* Increment the live_length for all of the registers that
	     are referenced in this block and live at this
3542	     particular point.  */
3543	  bitmap_iterator bi;
3544	  unsigned int regno;
3545	  EXECUTE_IF_SET_IN_BITMAP (local_live, 0, regno, bi)
3546	    {
3547	      REG_LIVE_LENGTH (regno)++;
3548	    }
3549	  luid++;
3550	}
3551
3552      bitmap_clear (do_not_gen);
3553      df_kill_notes (insn, dflow->flags);
3554
3555      /* Process the defs.  */
3556      if (CALL_P (insn))
3557	{
3558	  if (dflow->flags & DF_RI_LIFE)
3559	    {
3560	      bool can_throw = can_throw_internal (insn);
3561	      bool set_jump = (find_reg_note (insn, REG_SETJMP, NULL) != NULL);
3562	      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3563		{
3564		  REG_N_CALLS_CROSSED (regno)++;
3565		  if (can_throw)
3566		    REG_N_THROWING_CALLS_CROSSED (regno)++;
3567
3568		  /* We have a problem with any pseudoreg that lives
3569		     across the setjmp.  ANSI says that if a user
3570		     variable does not change in value between the
3571		     setjmp and the longjmp, then the longjmp
3572		     preserves it.  This includes longjmp from a place
3573		     where the pseudo appears dead.  (In principle,
3574		     the value still exists if it is in scope.)  If
3575		     the pseudo goes in a hard reg, some other value
3576		     may occupy that hard reg where this pseudo is
3577		     dead, thus clobbering the pseudo.  Conclusion:
3578		     such a pseudo must not go in a hard reg.  */
3579		  if (set_jump && regno >= FIRST_PSEUDO_REGISTER)
3580		    bitmap_set_bit (setjumps_crossed, regno);
3581		}
3582	    }
3583
3584	  /* We only care about real sets for calls.  Clobbers only
3585	     may clobber and cannot be depended on.  */
	  for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
	    {
	      if ((mws->type == DF_REF_REG_DEF)
		  && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
		df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
					    artificial_uses, dflow->flags);
	    }

	  /* All of the defs except the return value are some sort of
	     clobber.  This code is for the return.  */
	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
	    if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
	      df_create_unused_note (bb, insn, def, live, do_not_gen,
				     artificial_uses, local_live,
				     local_processed, dflow->flags, luid);

	}
      else
	{
	  /* Regular insn.  */
	  for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
	    {
	      if (mws->type == DF_REF_REG_DEF)
		df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
					    artificial_uses, dflow->flags);
	    }

	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
	    df_create_unused_note (bb, insn, def, live, do_not_gen,
				   artificial_uses, local_live,
				   local_processed, dflow->flags, luid);
	}

      /* Process the uses.  */
      for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
	{
	  if ((mws->type != DF_REF_REG_DEF)
	      && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
	    df_set_dead_notes_for_mw (insn, mws, live, do_not_gen,
				      artificial_uses, dflow->flags);
	}

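      /* Now look at each use individually.  Because the scan is
	 backwards, a used register that is not currently in LIVE is
	 dead after this insn, so the use gets a REG_DEAD note unless
	 it is suppressed by DO_NOT_GEN, ARTIFICIAL_USES, or one of
	 the flags checked below.  */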
      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
	{
	  unsigned int uregno = DF_REF_REGNO (use);

	  if ((dflow->flags & DF_RI_LIFE) && (uregno >= FIRST_PSEUDO_REGISTER))
	    {
	      REG_FREQ (uregno) += REG_FREQ_FROM_BB (bb);
	      if (REG_BASIC_BLOCK (uregno) == REG_BLOCK_UNKNOWN)
		REG_BASIC_BLOCK (uregno) = bb->index;
	      else if (REG_BASIC_BLOCK (uregno) != bb->index)
		REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
	    }

#ifdef REG_DEAD_DEBUGGING
	  fprintf (stderr, "  regular looking at use ");
	  df_ref_debug (use, stderr);
#endif
	  if (!bitmap_bit_p (live, uregno))
	    {
	      if ((!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
		  && (!bitmap_bit_p (do_not_gen, uregno))
		  && (!bitmap_bit_p (artificial_uses, uregno))
		  && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
		  && (!df_ignore_stack_reg (uregno)))
		{
		  rtx reg = (GET_CODE (*DF_REF_LOC (use)) == SUBREG)
		    ? SUBREG_REG (*DF_REF_LOC (use)) : *DF_REF_LOC (use);
		  rtx note = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
		  REG_NOTES (insn) = note;
		  if (dflow->flags & DF_RI_LIFE)
		    REG_N_DEATHS (uregno)++;

#ifdef REG_DEAD_DEBUGGING
		  print_note ("adding 4: ", insn, note);
#endif
		}
	      /* This register is now live.  */
	      bitmap_set_bit (live, uregno);

	      if (dflow->flags & DF_RI_LIFE)
		{
		  /* If we have seen this regno, then it has already
		     been processed correctly with the per-insn
		     increment.  If we have not seen it, we set the
		     bit so that it begins to get processed locally.
		     Note that we don't even get here if the variable
		     was live at the end of the block, since just a
		     ref inside the block does not affect the
		     calculations.  */
		  REG_LIVE_LENGTH (uregno)++;
		  bitmap_set_bit (local_live, uregno);
		  bitmap_set_bit (local_processed, uregno);
		}
	    }
	}
    }

  if (dflow->flags & DF_RI_LIFE)
    {
      /* Add the length of the block to all of the registers that were
	 not referenced, but still live in this block.  */
      bitmap_iterator bi;
      unsigned int regno;
      bitmap_and_compl_into (live, local_processed);
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
	{
	  REG_LIVE_LENGTH (regno) += luid;
	}
      bitmap_clear (local_processed);
      bitmap_clear (local_live);
    }
}


/* Compute register info: lifetime, bb, and number of defs and uses.  */
static void
df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
	       bitmap blocks_to_scan)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  bitmap live = BITMAP_ALLOC (NULL);
  bitmap do_not_gen = BITMAP_ALLOC (NULL);
  bitmap artificial_uses = BITMAP_ALLOC (NULL);
  bitmap local_live = NULL;
  bitmap local_processed = NULL;
  bitmap setjumps_crossed = NULL;

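  /* The LOCAL_LIVE, LOCAL_PROCESSED and SETJUMPS_CROSSED bitmaps are
     only needed when the lifetime information is being computed.  */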
  if (dflow->flags & DF_RI_LIFE)
    {
      local_live = BITMAP_ALLOC (NULL);
      local_processed = BITMAP_ALLOC (NULL);
      setjumps_crossed = BITMAP_ALLOC (NULL);
    }

#ifdef REG_DEAD_DEBUGGING
  df_lr_dump (dflow->df->problems_by_index [DF_LR], stderr);
  print_rtl_with_bb (stderr, get_insns ());
#endif

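  /* Compute the information one basic block at a time, reusing the
     scratch bitmaps allocated above for each block.  */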
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
  {
    df_ri_bb_compute (dflow, bb_index, live, do_not_gen, artificial_uses,
		      local_live, local_processed, setjumps_crossed);
  }

  BITMAP_FREE (live);
  BITMAP_FREE (do_not_gen);
  BITMAP_FREE (artificial_uses);
  if (dflow->flags & DF_RI_LIFE)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* See the setjmp comment in df_ri_bb_compute.  */
      EXECUTE_IF_SET_IN_BITMAP (setjumps_crossed, 0, regno, bi)
	{
	  REG_BASIC_BLOCK (regno) = REG_BLOCK_UNKNOWN;
	  REG_LIVE_LENGTH (regno) = -1;
	}

      BITMAP_FREE (local_live);
      BITMAP_FREE (local_processed);
      BITMAP_FREE (setjumps_crossed);
    }
}


/* Free all storage associated with the problem.  */

static void
df_ri_free (struct dataflow *dflow)
{
  free (dflow->problem_data);
  free (dflow);
}


/* Debugging info.  */

static void
df_ri_dump (struct dataflow *dflow, FILE *file)
{
  print_rtl_with_bb (file, get_insns ());

  if (dflow->flags & DF_RI_LIFE)
    {
      fprintf (file, "Register info:\n");
      dump_flow_info (file, -1);
    }
}

/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RI =
{
  DF_RI,                      /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_ri_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  NULL,                       /* Free basic block info.  */
  df_ri_compute,              /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ri_free,                 /* Free all of the problem information.  */
  df_ri_dump,                 /* Debugging.  */

  /* Technically this is only dependent on the live registers problem,
     but it will produce more information if one of the uninitialized
     register problems (UR, UREC) is also run.  */
  df_lr_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_ri_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_RI, flags);
}

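/* Example (a hypothetical sketch, not taken from any particular
   pass): a client that wants the full register information,
   including the lifetime counts guarded by DF_RI_LIFE above, would
   typically create the problem and then run the solver, e.g.

     struct dataflow *ri = df_ri_add_problem (df, DF_RI_LIFE);
     df_analyze (df);

   and afterwards read the results through the REG_FREQ,
   REG_BASIC_BLOCK, REG_LIVE_LENGTH and REG_N_* accessors updated by
   df_ri_bb_compute.  The exact driver calls and flag set depend on
   the pass that owns the df instance.  */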