1169689Skan/* Post reload partially redundant load elimination
2169689Skan   Copyright (C) 2004, 2005
3169689Skan   Free Software Foundation, Inc.
4169689Skan
5169689SkanThis file is part of GCC.
6169689Skan
7169689SkanGCC is free software; you can redistribute it and/or modify it under
8169689Skanthe terms of the GNU General Public License as published by the Free
9169689SkanSoftware Foundation; either version 2, or (at your option) any later
10169689Skanversion.
11169689Skan
12169689SkanGCC is distributed in the hope that it will be useful, but WITHOUT ANY
13169689SkanWARRANTY; without even the implied warranty of MERCHANTABILITY or
14169689SkanFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15169689Skanfor more details.
16169689Skan
17169689SkanYou should have received a copy of the GNU General Public License
18169689Skanalong with GCC; see the file COPYING.  If not, write to the Free
19169689SkanSoftware Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20169689Skan02110-1301, USA.  */
21169689Skan
22169689Skan#include "config.h"
23169689Skan#include "system.h"
24169689Skan#include "coretypes.h"
25169689Skan#include "tm.h"
26169689Skan#include "toplev.h"
27169689Skan
28169689Skan#include "rtl.h"
29169689Skan#include "tree.h"
30169689Skan#include "tm_p.h"
31169689Skan#include "regs.h"
32169689Skan#include "hard-reg-set.h"
33169689Skan#include "flags.h"
34169689Skan#include "real.h"
35169689Skan#include "insn-config.h"
36169689Skan#include "recog.h"
37169689Skan#include "basic-block.h"
38169689Skan#include "output.h"
39169689Skan#include "function.h"
40169689Skan#include "expr.h"
41169689Skan#include "except.h"
42169689Skan#include "intl.h"
43169689Skan#include "obstack.h"
44169689Skan#include "hashtab.h"
45169689Skan#include "params.h"
46169689Skan#include "target.h"
47169689Skan#include "timevar.h"
48169689Skan#include "tree-pass.h"
49169689Skan
50169689Skan/* The following code implements gcse after reload, the purpose of this
51169689Skan   pass is to cleanup redundant loads generated by reload and other
52169689Skan   optimizations that come after gcse. It searches for simple inter-block
53169689Skan   redundancies and tries to eliminate them by adding moves and loads
54169689Skan   in cold places.
55169689Skan
56169689Skan   Perform partially redundant load elimination, try to eliminate redundant
57169689Skan   loads created by the reload pass.  We try to look for full or partial
58169689Skan   redundant loads fed by one or more loads/stores in predecessor BBs,
59169689Skan   and try adding loads to make them fully redundant.  We also check if
60169689Skan   it's worth adding loads to be able to delete the redundant load.
61169689Skan
62169689Skan   Algorithm:
63169689Skan   1. Build available expressions hash table:
64169689Skan       For each load/store instruction, if the loaded/stored memory didn't
65169689Skan       change until the end of the basic block add this memory expression to
66169689Skan       the hash table.
67169689Skan   2. Perform Redundancy elimination:
68169689Skan      For each load instruction do the following:
69169689Skan	 perform partial redundancy elimination, check if it's worth adding
70169689Skan	 loads to make the load fully redundant.  If so add loads and
71169689Skan	 register copies and delete the load.
72169689Skan   3. Delete instructions made redundant in step 2.
73169689Skan
74169689Skan   Future enhancement:
75169689Skan     If the loaded register is used/defined between load and some store,
76169689Skan     look for some other free register between load and all its stores,
77169689Skan     and replace the load with a copy from this register to the loaded
78169689Skan     register.
79169689Skan*/
80169689Skan
81169689Skan
/* Keep statistics of this pass.  Dumped at the end of the pass when a
   dump file is requested.  */
static struct
{
  int moves_inserted;		/* Register-to-register moves added.  */
  int copies_inserted;		/* Compensation loads/copies added on edges.  */
  int insns_deleted;		/* Redundant load insns removed.  */
} stats;
89169689Skan
90169689Skan/* We need to keep a hash table of expressions.  The table entries are of
91169689Skan   type 'struct expr', and for each expression there is a single linked
92169689Skan   list of occurrences.  */
93169689Skan
94169689Skan/* The table itself.  */
95169689Skanstatic htab_t expr_table;
96169689Skan
/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* Cached hash value for this entry, computed once by hash_expr and
     returned unchanged by hash_expr_for_htab.  */
  hashval_t hash;

  /* List of available occurrences in basic blocks in the function;
     at most one occurrence per basic block (see insert_expr_in_table).  */
  struct occr *avail_occr;
};
109169689Skan
110169689Skanstatic struct obstack expr_obstack;
111169689Skan
/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression (in some other basic block).  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};
125169689Skan
126169689Skanstatic struct obstack occr_obstack;
127169689Skan
/* The following structure holds the information about the occurrences of
   the redundant instructions.  Each node describes one predecessor edge
   on which the expression is *not* available and on which a compensation
   insn may have to be inserted.  */
struct unoccr
{
  struct unoccr *next;		/* Next unavailable occurrence.  */
  edge pred;			/* Predecessor edge lacking the value.  */
  rtx insn;			/* Insn to insert on that edge, if any.  */
};
136169689Skan
137169689Skanstatic struct obstack unoccr_obstack;
138169689Skan
139169689Skan/* Array where each element is the CUID if the insn that last set the hard
140169689Skan   register with the number of the element, since the start of the current
141169689Skan   basic block.
142169689Skan
143169689Skan   This array is used during the building of the hash table (step 1) to
144169689Skan   determine if a reg is killed before the end of a basic block.
145169689Skan
146169689Skan   It is also used when eliminating partial redundancies (step 2) to see
147169689Skan   if a reg was modified since the start of a basic block.  */
148169689Skanstatic int *reg_avail_info;
149169689Skan
/* A list of insns that may modify memory within the current basic block.
   Maintained in reverse insn order; flushed by reset_opr_set_tables when
   processing of a block is complete.  */
struct modifies_mem
{
  rtx insn;			/* The memory-modifying insn.  */
  struct modifies_mem *next;	/* Earlier memory-modifying insn.  */
};
static struct modifies_mem *modifies_mem_list;
157169689Skan
158169689Skan/* The modifies_mem structs also go on an obstack, only this obstack is
159169689Skan   freed each time after completing the analysis or transformations on
160169689Skan   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
161169689Skan   object on the obstack to keep track of the bottom of the obstack.  */
162169689Skanstatic struct obstack modifies_mem_obstack;
163169689Skanstatic struct modifies_mem  *modifies_mem_obstack_bottom;
164169689Skan
165169689Skan/* Mapping of insn UIDs to CUIDs.
166169689Skan   CUIDs are like UIDs except they increase monotonically in each basic
167169689Skan   block, have no gaps, and only apply to real insns.  */
168169689Skanstatic int *uid_cuid;
169169689Skan#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
170169689Skan
171169689Skan
172169689Skan/* Helpers for memory allocation/freeing.  */
173169689Skanstatic void alloc_mem (void);
174169689Skanstatic void free_mem (void);
175169689Skan
176169689Skan/* Support for hash table construction and transformations.  */
177169689Skanstatic bool oprs_unchanged_p (rtx, rtx, bool);
178169689Skanstatic void record_last_reg_set_info (rtx, int);
179169689Skanstatic void record_last_mem_set_info (rtx);
180169689Skanstatic void record_last_set_info (rtx, rtx, void *);
181169689Skanstatic void record_opr_changes (rtx);
182169689Skan
183169689Skanstatic void find_mem_conflicts (rtx, rtx, void *);
184169689Skanstatic int load_killed_in_block_p (int, rtx, bool);
185169689Skanstatic void reset_opr_set_tables (void);
186169689Skan
187169689Skan/* Hash table support.  */
188169689Skanstatic hashval_t hash_expr (rtx, int *);
189169689Skanstatic hashval_t hash_expr_for_htab (const void *);
190169689Skanstatic int expr_equiv_p (const void *, const void *);
191169689Skanstatic void insert_expr_in_table (rtx, rtx);
192169689Skanstatic struct expr *lookup_expr_in_table (rtx);
193169689Skanstatic int dump_hash_table_entry (void **, void *);
194169689Skanstatic void dump_hash_table (FILE *);
195169689Skan
196169689Skan/* Helpers for eliminate_partially_redundant_load.  */
197169689Skanstatic bool reg_killed_on_edge (rtx, edge);
198169689Skanstatic bool reg_used_on_edge (rtx, edge);
199169689Skan
200169689Skanstatic rtx get_avail_load_store_reg (rtx);
201169689Skan
202169689Skanstatic bool bb_has_well_behaved_predecessors (basic_block);
203169689Skanstatic struct occr* get_bb_avail_insn (basic_block, struct occr *);
204169689Skanstatic void hash_scan_set (rtx);
205169689Skanstatic void compute_hash_table (void);
206169689Skan
207169689Skan/* The work horses of this pass.  */
208169689Skanstatic void eliminate_partially_redundant_load (basic_block,
209169689Skan						rtx,
210169689Skan						struct expr *);
211169689Skanstatic void eliminate_partially_redundant_loads (void);
212169689Skan
213169689Skan
/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     Real insns get consecutive CUIDs; a non-insn (note, barrier, ...)
     is mapped to the CUID that the next real insn will receive.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = htab_create (MAX (i / 4, 13),
			    hash_expr_for_htab, expr_equiv_p, NULL);

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  Only hard registers exist after reload,
     hence the FIRST_PSEUDO_REGISTER bound.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
					   sizeof (struct modifies_mem));
}
260169689Skan
261169689Skan/* Free memory allocated by alloc_mem.  */
262169689Skan
263169689Skanstatic void
264169689Skanfree_mem (void)
265169689Skan{
266169689Skan  free (uid_cuid);
267169689Skan
268169689Skan  htab_delete (expr_table);
269169689Skan
270169689Skan  obstack_free (&expr_obstack, NULL);
271169689Skan  obstack_free (&occr_obstack, NULL);
272169689Skan  obstack_free (&unoccr_obstack, NULL);
273169689Skan  obstack_free (&modifies_mem_obstack, NULL);
274169689Skan
275169689Skan  free (reg_avail_info);
276169689Skan}
277169689Skan
278169689Skan
279169689Skan/* Hash expression X.
280169689Skan   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
281169689Skan   or if the expression contains something we don't want to insert in the
282169689Skan   table.  */
283169689Skan
284169689Skanstatic hashval_t
285169689Skanhash_expr (rtx x, int *do_not_record_p)
286169689Skan{
287169689Skan  *do_not_record_p = 0;
288169689Skan  return hash_rtx (x, GET_MODE (x), do_not_record_p,
289169689Skan		   NULL,  /*have_reg_qty=*/false);
290169689Skan}
291169689Skan
292169689Skan/* Callback for hashtab.
293169689Skan   Return the hash value for expression EXP.  We don't actually hash
294169689Skan   here, we just return the cached hash value.  */
295169689Skan
296169689Skanstatic hashval_t
297169689Skanhash_expr_for_htab (const void *expp)
298169689Skan{
299169689Skan  struct expr *exp = (struct expr *) expp;
300169689Skan  return exp->hash;
301169689Skan}
302169689Skan
303169689Skan/* Callback for hashtab.
304169689Skan   Return nonzero if exp1 is equivalent to exp2.  */
305169689Skan
306169689Skanstatic int
307169689Skanexpr_equiv_p (const void *exp1p, const void *exp2p)
308169689Skan{
309169689Skan  struct expr *exp1 = (struct expr *) exp1p;
310169689Skan  struct expr *exp2 = (struct expr *) exp2p;
311169689Skan  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
312169689Skan
313169689Skan  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
314169689Skan  return equiv_p;
315169689Skan}
316169689Skan
317169689Skan
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
						    hash, INSERT);

  if (! (*slot))
    /* The expression isn't found, so insert it.  */
    *slot = cur_expr;
  else
    {
      /* The expression is already in the table, so roll back the
	 obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  The occr
     list is keyed on BLOCK_NUM; there is at most one entry per block.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list so we can append a new one below.  */
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
						  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = avail_occr;
      else
        last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}
396169689Skan
397169689Skan
398169689Skan/* Lookup pattern PAT in the expression hash table.
399169689Skan   The result is a pointer to the table entry, or NULL if not found.  */
400169689Skan
401169689Skanstatic struct expr *
402169689Skanlookup_expr_in_table (rtx pat)
403169689Skan{
404169689Skan  int do_not_record_p;
405169689Skan  struct expr **slot, *tmp_expr;
406169689Skan  hashval_t hash = hash_expr (pat, &do_not_record_p);
407169689Skan
408169689Skan  if (do_not_record_p)
409169689Skan    return NULL;
410169689Skan
411169689Skan  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
412169689Skan					    sizeof (struct expr));
413169689Skan  tmp_expr->expr = pat;
414169689Skan  tmp_expr->hash = hash;
415169689Skan  tmp_expr->avail_occr = NULL;
416169689Skan
417169689Skan  slot = (struct expr **) htab_find_slot_with_hash (expr_table, tmp_expr,
418169689Skan                                                    hash, INSERT);
419169689Skan  obstack_free (&expr_obstack, tmp_expr);
420169689Skan
421169689Skan  if (!slot)
422169689Skan    return NULL;
423169689Skan  else
424169689Skan    return (*slot);
425169689Skan}
426169689Skan
427169689Skan
428169689Skan/* Dump all expressions and occurrences that are currently in the
429169689Skan   expression hash table to FILE.  */
430169689Skan
431169689Skan/* This helper is called via htab_traverse.  */
432169689Skanstatic int
433169689Skandump_hash_table_entry (void **slot, void *filep)
434169689Skan{
435169689Skan  struct expr *expr = (struct expr *) *slot;
436169689Skan  FILE *file = (FILE *) filep;
437169689Skan  struct occr *occr;
438169689Skan
439169689Skan  fprintf (file, "expr: ");
440169689Skan  print_rtl (file, expr->expr);
441169689Skan  fprintf (file,"\nhashcode: %u\n", expr->hash);
442169689Skan  fprintf (file,"list of occurrences:\n");
443169689Skan  occr = expr->avail_occr;
444169689Skan  while (occr)
445169689Skan    {
446169689Skan      rtx insn = occr->insn;
447169689Skan      print_rtl_single (file, insn);
448169689Skan      fprintf (file, "\n");
449169689Skan      occr = occr->next;
450169689Skan    }
451169689Skan  fprintf (file, "\n");
452169689Skan  return 1;
453169689Skan}
454169689Skan
455169689Skanstatic void
456169689Skandump_hash_table (FILE *file)
457169689Skan{
458169689Skan  fprintf (file, "\n\nexpression hash table\n");
459169689Skan  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
460169689Skan           (long) htab_size (expr_table),
461169689Skan           (long) htab_elements (expr_table),
462169689Skan           htab_collisions (expr_table));
463169689Skan  if (htab_elements (expr_table) > 0)
464169689Skan    {
465169689Skan      fprintf (file, "\n\ntable entries:\n");
466169689Skan      htab_traverse (expr_table, dump_hash_table_entry, file);
467169689Skan    }
468169689Skan  fprintf (file, "\n");
469169689Skan}
470169689Skan
471259268Spfg/* Return true if register X is recorded as being set by an instruction
472259268Spfg   whose CUID is greater than the one given.  */
473169689Skan
474259268Spfgstatic bool
475259268Spfgreg_changed_after_insn_p (rtx x, int cuid)
476259268Spfg{
477259268Spfg  unsigned int regno, end_regno;
478259268Spfg
479259268Spfg  regno = REGNO (x);
480259268Spfg  end_regno = END_HARD_REGNO (x);
481259268Spfg  do
482259268Spfg    if (reg_avail_info[regno] > cuid)
483259268Spfg      return true;
484259268Spfg  while (++regno < end_regno);
485259268Spfg  return false;
486259268Spfg}
487259268Spfg
/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.

   Recurses over the RTL structure of X; registers are checked against
   reg_avail_info and memory references against the modifies_mem list.  */

static bool
oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* A null operand vacuously holds.  */
  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
	/* INSN_CUID (insn) - 1: a set by INSN itself does not count
	   as changing the register after INSN.  */
	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
	/* CUID 0 means "since the start of the block".  */
	return !reg_changed_after_insn_p (x, 0);

    case MEM:
      /* The memory itself must survive, and so must its address.  */
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants and labels never change.  */
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* An autoincrement modifies its register as a side effect, so
	 the expression is never available after the insn.  */
      if (after_insn)
	return 0;
      break;

    default:
      break;
    }

  /* Generic case: every sub-rtx and every element of every rtx vector
     must itself be unchanged.  */
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
	    return 0;
    }

  return 1;
}
561169689Skan
562169689Skan
563169689Skan/* Used for communication between find_mem_conflicts and
564169689Skan   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
565169689Skan   conflict between two memory references.
566169689Skan   This is a bit of a hack to work around the limitations of note_stores.  */
567169689Skanstatic int mems_conflict_p;
568169689Skan
569169689Skan/* DEST is the output of an instruction.  If it is a memory reference, and
570169689Skan   possibly conflicts with the load found in DATA, then set mems_conflict_p
571169689Skan   to a nonzero value.  */
572169689Skan
573169689Skanstatic void
574169689Skanfind_mem_conflicts (rtx dest, rtx setter ATTRIBUTE_UNUSED,
575169689Skan		    void *data)
576169689Skan{
577169689Skan  rtx mem_op = (rtx) data;
578169689Skan
579169689Skan  while (GET_CODE (dest) == SUBREG
580169689Skan	 || GET_CODE (dest) == ZERO_EXTRACT
581169689Skan	 || GET_CODE (dest) == STRICT_LOW_PART)
582169689Skan    dest = XEXP (dest, 0);
583169689Skan
584169689Skan  /* If DEST is not a MEM, then it will not conflict with the load.  Note
585169689Skan     that function calls are assumed to clobber memory, but are handled
586169689Skan     elsewhere.  */
587169689Skan  if (! MEM_P (dest))
588169689Skan    return;
589169689Skan
590169689Skan  if (true_dependence (dest, GET_MODE (dest), mem_op,
591169689Skan		       rtx_addr_varies_p))
592169689Skan    mems_conflict_p = 1;
593169689Skan}
594169689Skan
595169689Skan
596169689Skan/* Return nonzero if the expression in X (a memory reference) is killed
597169689Skan   in the current basic block before (if AFTER_INSN is false) or after
598169689Skan   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.
599169689Skan
600169689Skan   This function assumes that the modifies_mem table is flushed when
601169689Skan   the hash table construction or redundancy elimination phases start
602169689Skan   processing a new basic block.  */
603169689Skan
604169689Skanstatic int
605169689Skanload_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
606169689Skan{
607169689Skan  struct modifies_mem *list_entry = modifies_mem_list;
608169689Skan
609169689Skan  while (list_entry)
610169689Skan    {
611169689Skan      rtx setter = list_entry->insn;
612169689Skan
613169689Skan      /* Ignore entries in the list that do not apply.  */
614169689Skan      if ((after_insn
615169689Skan	   && INSN_CUID (setter) < uid_limit)
616169689Skan	  || (! after_insn
617169689Skan	      && INSN_CUID (setter) > uid_limit))
618169689Skan	{
619169689Skan	  list_entry = list_entry->next;
620169689Skan	  continue;
621169689Skan	}
622169689Skan
623169689Skan      /* If SETTER is a call everything is clobbered.  Note that calls
624169689Skan	 to pure functions are never put on the list, so we need not
625169689Skan	 worry about them.  */
626169689Skan      if (CALL_P (setter))
627169689Skan	return 1;
628169689Skan
629169689Skan      /* SETTER must be an insn of some kind that sets memory.  Call
630169689Skan	 note_stores to examine each hunk of memory that is modified.
631169689Skan	 It will set mems_conflict_p to nonzero if there may be a
632169689Skan	 conflict between X and SETTER.  */
633169689Skan      mems_conflict_p = 0;
634169689Skan      note_stores (PATTERN (setter), find_mem_conflicts, x);
635169689Skan      if (mems_conflict_p)
636169689Skan	return 1;
637169689Skan
638169689Skan      list_entry = list_entry->next;
639169689Skan    }
640169689Skan  return 0;
641169689Skan}
642169689Skan
643169689Skan
/* Record register first/last/block set information for REGNO in INSN.
   Remembers the CUID of the latest insn setting REGNO, which is what
   reg_changed_after_insn_p compares against.  */

static inline void
record_last_reg_set_info (rtx insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}
651169689Skan
652169689Skan
653169689Skan/* Record memory modification information for INSN.  We do not actually care
654169689Skan   about the memory location(s) that are set, or even how they are set (consider
655169689Skan   a CALL_INSN).  We merely need to record which insns modify memory.  */
656169689Skan
657169689Skanstatic void
658169689Skanrecord_last_mem_set_info (rtx insn)
659169689Skan{
660169689Skan  struct modifies_mem *list_entry;
661169689Skan
662169689Skan  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
663169689Skan						      sizeof (struct modifies_mem));
664169689Skan  list_entry->insn = insn;
665169689Skan  list_entry->next = modifies_mem_list;
666169689Skan  modifies_mem_list = list_entry;
667169689Skan}
668169689Skan
669169689Skan/* Called from compute_hash_table via note_stores to handle one
670169689Skan   SET or CLOBBER in an insn.  DATA is really the instruction in which
671169689Skan   the SET is taking place.  */
672169689Skan
673169689Skanstatic void
674169689Skanrecord_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
675169689Skan{
676169689Skan  rtx last_set_insn = (rtx) data;
677169689Skan
678169689Skan  if (GET_CODE (dest) == SUBREG)
679169689Skan    dest = SUBREG_REG (dest);
680169689Skan
681169689Skan  if (REG_P (dest))
682169689Skan    record_last_reg_set_info (last_set_insn, REGNO (dest));
683169689Skan  else if (MEM_P (dest))
684169689Skan    {
685169689Skan      /* Ignore pushes, they don't clobber memory.  They may still
686169689Skan	 clobber the stack pointer though.  Some targets do argument
687169689Skan	 pushes without adding REG_INC notes.  See e.g. PR25196,
688169689Skan	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
689169689Skan	 such changes here too.  */
690169689Skan      if (! push_operand (dest, GET_MODE (dest)))
691169689Skan	record_last_mem_set_info (last_set_insn);
692169689Skan      else
693169689Skan	record_last_reg_set_info (last_set_insn, STACK_POINTER_REGNUM);
694169689Skan    }
695169689Skan}
696169689Skan
697169689Skan
698169689Skan/* Reset tables used to keep track of what's still available since the
699169689Skan   start of the block.  */
700169689Skan
701169689Skanstatic void
702169689Skanreset_opr_set_tables (void)
703169689Skan{
704169689Skan  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
705169689Skan  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
706169689Skan  modifies_mem_list = NULL;
707169689Skan}
708169689Skan
709169689Skan
/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  Covers explicit stores,
   autoincrement side effects, and (for calls) call-clobbered registers,
   registers clobbered via CALL_INSN_FUNCTION_USAGE, and memory.  */

static void
record_opr_changes (rtx insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, REGNO (XEXP (note, 0)));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno, end_regno;
      rtx link, x;

      /* Registers clobbered by the calling convention.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
	  record_last_reg_set_info (insn, regno);

      /* Registers explicitly clobbered in CALL_INSN_FUNCTION_USAGE,
	 including every hard register a multi-word clobber spans.  */
      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	  {
	    x = XEXP (XEXP (link, 0), 0);
	    if (REG_P (x))
	      {
		gcc_assert (HARD_REGISTER_P (x));
	        regno = REGNO (x);
		end_regno = END_HARD_REGNO (x);
		do
		  record_last_reg_set_info (insn, regno);
		while (++regno < end_regno);
	      }
	  }

      /* Calls to const/pure functions do not clobber memory.  */
      if (! CONST_OR_PURE_CALL_P (insn))
	record_last_mem_set_info (insn);
    }
}
755169689Skan
756169689Skan
757169689Skan/* Scan the pattern of INSN and add an entry to the hash TABLE.
758169689Skan   After reload we are interested in loads/stores only.  */
759169689Skan
760169689Skanstatic void
761169689Skanhash_scan_set (rtx insn)
762169689Skan{
763169689Skan  rtx pat = PATTERN (insn);
764169689Skan  rtx src = SET_SRC (pat);
765169689Skan  rtx dest = SET_DEST (pat);
766169689Skan
767169689Skan  /* We are only interested in loads and stores.  */
768169689Skan  if (! MEM_P (src) && ! MEM_P (dest))
769169689Skan    return;
770169689Skan
771169689Skan  /* Don't mess with jumps and nops.  */
772169689Skan  if (JUMP_P (insn) || set_noop_p (pat))
773169689Skan    return;
774169689Skan
775169689Skan  if (REG_P (dest))
776169689Skan    {
777169689Skan      if (/* Don't CSE something if we can't do a reg/reg copy.  */
778169689Skan	  can_copy_p (GET_MODE (dest))
779169689Skan	  /* Is SET_SRC something we want to gcse?  */
780169689Skan	  && general_operand (src, GET_MODE (src))
781169689Skan#ifdef STACK_REGS
782169689Skan	  /* Never consider insns touching the register stack.  It may
783169689Skan	     create situations that reg-stack cannot handle (e.g. a stack
784169689Skan	     register live across an abnormal edge).  */
785169689Skan	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
786169689Skan#endif
787169689Skan	  /* An expression is not available if its operands are
788169689Skan	     subsequently modified, including this insn.  */
789169689Skan	  && oprs_unchanged_p (src, insn, true))
790169689Skan	{
791169689Skan	  insert_expr_in_table (src, insn);
792169689Skan	}
793169689Skan    }
794169689Skan  else if (REG_P (src))
795169689Skan    {
796169689Skan      /* Only record sets of pseudo-regs in the hash table.  */
797169689Skan      if (/* Don't CSE something if we can't do a reg/reg copy.  */
798169689Skan	  can_copy_p (GET_MODE (src))
799169689Skan	  /* Is SET_DEST something we want to gcse?  */
800169689Skan	  && general_operand (dest, GET_MODE (dest))
801169689Skan#ifdef STACK_REGS
802169689Skan	  /* As above for STACK_REGS.  */
803169689Skan	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
804169689Skan#endif
805169689Skan	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
806169689Skan	  /* Check if the memory expression is killed after insn.  */
807169689Skan	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
808169689Skan	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
809169689Skan	{
810169689Skan	  insert_expr_in_table (dest, insn);
811169689Skan	}
812169689Skan    }
813169689Skan}
814169689Skan
815169689Skan
816169689Skan/* Create hash table of memory expressions available at end of basic
817169689Skan   blocks.  Basically you should think of this hash table as the
818169689Skan   representation of AVAIL_OUT.  This is the set of expressions that
819169689Skan   is generated in a basic block and not killed before the end of the
820169689Skan   same basic block.  Notice that this is really a local computation.  */
821169689Skan
822169689Skanstatic void
823169689Skancompute_hash_table (void)
824169689Skan{
825169689Skan  basic_block bb;
826169689Skan
827169689Skan  FOR_EACH_BB (bb)
828169689Skan    {
829169689Skan      rtx insn;
830169689Skan
831169689Skan      /* First pass over the instructions records information used to
832169689Skan	 determine when registers and memory are last set.
833169689Skan	 Since we compute a "local" AVAIL_OUT, reset the tables that
834169689Skan	 help us keep track of what has been modified since the start
835169689Skan	 of the block.  */
836169689Skan      reset_opr_set_tables ();
837169689Skan      FOR_BB_INSNS (bb, insn)
838169689Skan	{
839169689Skan	  if (INSN_P (insn))
840169689Skan            record_opr_changes (insn);
841169689Skan	}
842169689Skan
843169689Skan      /* The next pass actually builds the hash table.  */
844169689Skan      FOR_BB_INSNS (bb, insn)
845169689Skan	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
846169689Skan	  hash_scan_set (insn);
847169689Skan    }
848169689Skan}
849169689Skan
850169689Skan
851169689Skan/* Check if register REG is killed in any insn waiting to be inserted on
852169689Skan   edge E.  This function is required to check that our data flow analysis
853169689Skan   is still valid prior to commit_edge_insertions.  */
854169689Skan
855169689Skanstatic bool
856169689Skanreg_killed_on_edge (rtx reg, edge e)
857169689Skan{
858169689Skan  rtx insn;
859169689Skan
860169689Skan  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
861169689Skan    if (INSN_P (insn) && reg_set_p (reg, insn))
862169689Skan      return true;
863169689Skan
864169689Skan  return false;
865169689Skan}
866169689Skan
867169689Skan/* Similar to above - check if register REG is used in any insn waiting
868169689Skan   to be inserted on edge E.
869169689Skan   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
870169689Skan   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */
871169689Skan
872169689Skanstatic bool
873169689Skanreg_used_on_edge (rtx reg, edge e)
874169689Skan{
875169689Skan  rtx insn;
876169689Skan
877169689Skan  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
878169689Skan    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
879169689Skan      return true;
880169689Skan
881169689Skan  return false;
882169689Skan}
883169689Skan
884169689Skan/* Return the loaded/stored register of a load/store instruction.  */
885169689Skan
886169689Skanstatic rtx
887169689Skanget_avail_load_store_reg (rtx insn)
888169689Skan{
889169689Skan  if (REG_P (SET_DEST (PATTERN (insn))))
890169689Skan    /* A load.  */
891169689Skan    return SET_DEST(PATTERN(insn));
892169689Skan  else
893169689Skan    {
894169689Skan      /* A store.  */
895169689Skan      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
896169689Skan      return SET_SRC (PATTERN (insn));
897169689Skan    }
898169689Skan}
899169689Skan
900169689Skan/* Return nonzero if the predecessors of BB are "well behaved".  */
901169689Skan
902169689Skanstatic bool
903169689Skanbb_has_well_behaved_predecessors (basic_block bb)
904169689Skan{
905169689Skan  edge pred;
906169689Skan  edge_iterator ei;
907169689Skan
908169689Skan  if (EDGE_COUNT (bb->preds) == 0)
909169689Skan    return false;
910169689Skan
911169689Skan  FOR_EACH_EDGE (pred, ei, bb->preds)
912169689Skan    {
913169689Skan      if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
914169689Skan	return false;
915169689Skan
916169689Skan      if (JUMP_TABLE_DATA_P (BB_END (pred->src)))
917169689Skan	return false;
918169689Skan    }
919169689Skan  return true;
920169689Skan}
921169689Skan
922169689Skan
923169689Skan/* Search for the occurrences of expression in BB.  */
924169689Skan
925169689Skanstatic struct occr*
926169689Skanget_bb_avail_insn (basic_block bb, struct occr *occr)
927169689Skan{
928169689Skan  for (; occr != NULL; occr = occr->next)
929169689Skan    if (BLOCK_FOR_INSN (occr->insn) == bb)
930169689Skan      return occr;
931169689Skan  return NULL;
932169689Skan}
933169689Skan
934169689Skan
/* This handles the case where several stores feed a partially redundant
   load. It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if,
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.

   On success, moves or load copies are queued on the incoming edges of
   BB (committed later by commit_edge_insertions) and the original load
   INSN is deleted or marked for later deletion.  */

static void
eliminate_partially_redundant_load (basic_block bb, rtx insn,
				    struct expr *expr)
{
  edge pred;
  rtx avail_insn = NULL_RTX;	/* Occurrence making EXPR available in a pred.  */
  rtx avail_reg;		/* Register holding the memory's value there.  */
  rtx dest, pat;
  struct occr *a_occr;
  /* Predecessors where the expression is available (copy suffices).  */
  struct unoccr *occr, *avail_occrs = NULL;
  /* Predecessors where the load must be regenerated; ROLLBACK_UNOCCR
     marks the first obstack allocation so everything can be freed on
     exit.  */
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;		/* Number of preds where a copy can be used.  */
  gcov_type ok_count = 0; /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx next_pred_bb_end;

      avail_insn = NULL_RTX;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      /* First insn past the end of PRED_BB, used as a scan limit below.  */
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
	   a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
	{
	  /* Check if the loaded register is not used.  */
	  avail_insn = a_occr->insn;
	  avail_reg = get_avail_load_store_reg (avail_insn);
	  gcc_assert (avail_reg);

	  /* Make sure we can generate a move from register avail_reg to
	     dest.  */
	  extract_insn (gen_move_insn (copy_rtx (dest),
				       copy_rtx (avail_reg)));
	  if (! constrain_operands (1)
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
	    {
	      avail_insn = NULL;
	      continue;
	    }
	  /* AVAIL_REG must still hold the memory's value at the end of
	     the predecessor block.  */
	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
	    /* AVAIL_INSN remains non-null.  */
	    break;
	  else
	    avail_insn = NULL;
	}

      if (EDGE_CRITICAL_P (pred))
	critical_count += pred->count;

      if (avail_insn != NULL_RTX)
	{
	  /* A reg-reg copy on this edge is enough.  */
	  npred_ok++;
	  ok_count += pred->count;
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
	    {
	      /* Check if there is going to be a split.  */
	      if (EDGE_CRITICAL_P (pred))
		critical_edge_split = true;
	    }
	  else /* It's a dead move, no need to generate.  */
	    continue;
	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						  sizeof (struct unoccr));
	  occr->insn = avail_insn;
	  occr->pred = pred;
	  occr->next = avail_occrs;
	  avail_occrs = occr;
	  if (! rollback_unoccr)
	    rollback_unoccr = occr;
	}
      else
	{
	  /* The expression is not available here; a full load would have
	     to be inserted on this edge to make INSN fully redundant.
	     Adding a load on a critical edge will cause a split.  */
	  if (EDGE_CRITICAL_P (pred))
	    critical_edge_split = true;
	  not_ok_count += pred->count;
	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						    sizeof (struct unoccr));
	  unoccr->insn = NULL_RTX;
	  unoccr->pred = pred;
	  unoccr->next = unavail_occrs;
	  unavail_occrs = unoccr;
	  if (! rollback_unoccr)
	    rollback_unoccr = unoccr;
	}
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_size && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
         a critical edge is profitable or not so don't do it.  */
      || ((! profile_info || ! flag_branch_probabilities
	   || targetm.cannot_modify_jumps_p ())
	  && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination:
     weigh the count of eliminated loads against the loads and critical
     edge splits we would have to add.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    goto cleanup;
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
	 memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg)),
			   pred);
      stats.moves_inserted++;

      if (dump_file)
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
	}
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available. If insn is available it may help
     discover additional redundancies, so mark it for later deletion.  */
  /* Look for INSN itself among the available occurrences in BB; the
     loop body is intentionally empty.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next));

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
	{
	  fprintf (dump_file, "deleting insn:\n");
          print_rtl_single (dump_file, insn);
          fprintf (dump_file, "\n");
	}
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  /* Release everything this invocation allocated on the obstack.  */
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}
1142169689Skan
1143169689Skan/* Performing the redundancy elimination as described before.  */
1144169689Skan
1145169689Skanstatic void
1146169689Skaneliminate_partially_redundant_loads (void)
1147169689Skan{
1148169689Skan  rtx insn;
1149169689Skan  basic_block bb;
1150169689Skan
1151169689Skan  /* Note we start at block 1.  */
1152169689Skan
1153169689Skan  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1154169689Skan    return;
1155169689Skan
1156169689Skan  FOR_BB_BETWEEN (bb,
1157169689Skan		  ENTRY_BLOCK_PTR->next_bb->next_bb,
1158169689Skan		  EXIT_BLOCK_PTR,
1159169689Skan		  next_bb)
1160169689Skan    {
1161169689Skan      /* Don't try anything on basic blocks with strange predecessors.  */
1162169689Skan      if (! bb_has_well_behaved_predecessors (bb))
1163169689Skan	continue;
1164169689Skan
1165169689Skan      /* Do not try anything on cold basic blocks.  */
1166169689Skan      if (probably_cold_bb_p (bb))
1167169689Skan	continue;
1168169689Skan
1169169689Skan      /* Reset the table of things changed since the start of the current
1170169689Skan	 basic block.  */
1171169689Skan      reset_opr_set_tables ();
1172169689Skan
1173169689Skan      /* Look at all insns in the current basic block and see if there are
1174169689Skan	 any loads in it that we can record.  */
1175169689Skan      FOR_BB_INSNS (bb, insn)
1176169689Skan	{
1177169689Skan	  /* Is it a load - of the form (set (reg) (mem))?  */
1178169689Skan	  if (NONJUMP_INSN_P (insn)
1179169689Skan              && GET_CODE (PATTERN (insn)) == SET
1180169689Skan	      && REG_P (SET_DEST (PATTERN (insn)))
1181169689Skan	      && MEM_P (SET_SRC (PATTERN (insn))))
1182169689Skan	    {
1183169689Skan	      rtx pat = PATTERN (insn);
1184169689Skan	      rtx src = SET_SRC (pat);
1185169689Skan	      struct expr *expr;
1186169689Skan
1187169689Skan	      if (!MEM_VOLATILE_P (src)
1188169689Skan		  && GET_MODE (src) != BLKmode
1189169689Skan		  && general_operand (src, GET_MODE (src))
1190169689Skan		  /* Are the operands unchanged since the start of the
1191169689Skan		     block?  */
1192169689Skan		  && oprs_unchanged_p (src, insn, false)
1193169689Skan		  && !(flag_non_call_exceptions && may_trap_p (src))
1194169689Skan		  && !side_effects_p (src)
1195169689Skan		  /* Is the expression recorded?  */
1196169689Skan		  && (expr = lookup_expr_in_table (src)) != NULL)
1197169689Skan		{
1198169689Skan		  /* We now have a load (insn) and an available memory at
1199169689Skan		     its BB start (expr). Try to remove the loads if it is
1200169689Skan		     redundant.  */
1201169689Skan		  eliminate_partially_redundant_load (bb, insn, expr);
1202169689Skan		}
1203169689Skan	    }
1204169689Skan
1205169689Skan	  /* Keep track of everything modified by this insn, so that we
1206169689Skan	     know what has been modified since the start of the current
1207169689Skan	     basic block.  */
1208169689Skan	  if (INSN_P (insn))
1209169689Skan	    record_opr_changes (insn);
1210169689Skan	}
1211169689Skan    }
1212169689Skan
1213169689Skan  commit_edge_insertions ();
1214169689Skan}
1215169689Skan
1216169689Skan/* Go over the expression hash table and delete insns that were
1217169689Skan   marked for later deletion.  */
1218169689Skan
1219169689Skan/* This helper is called via htab_traverse.  */
1220169689Skanstatic int
1221169689Skandelete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)
1222169689Skan{
1223169689Skan  struct expr *expr = (struct expr *) *slot;
1224169689Skan  struct occr *occr;
1225169689Skan
1226169689Skan  for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1227169689Skan    {
1228169689Skan      if (occr->deleted_p)
1229169689Skan	{
1230169689Skan	  delete_insn (occr->insn);
1231169689Skan	  stats.insns_deleted++;
1232169689Skan
1233169689Skan	  if (dump_file)
1234169689Skan	    {
1235169689Skan	      fprintf (dump_file, "deleting insn:\n");
1236169689Skan	      print_rtl_single (dump_file, occr->insn);
1237169689Skan	      fprintf (dump_file, "\n");
1238169689Skan	    }
1239169689Skan	}
1240169689Skan    }
1241169689Skan
1242169689Skan  return 1;
1243169689Skan}
1244169689Skan
1245169689Skanstatic void
1246169689Skandelete_redundant_insns (void)
1247169689Skan{
1248169689Skan  htab_traverse (expr_table, delete_redundant_insns_1, NULL);
1249169689Skan  if (dump_file)
1250169689Skan    fprintf (dump_file, "\n");
1251169689Skan}
1252169689Skan
1253169689Skan/* Main entry point of the GCSE after reload - clean some redundant loads
1254169689Skan   due to spilling.  */
1255169689Skan
1256169689Skanstatic void
1257169689Skangcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
1258169689Skan{
1259169689Skan
1260169689Skan  memset (&stats, 0, sizeof (stats));
1261169689Skan
1262169689Skan  /* Allocate ememory for this pass.
1263169689Skan     Also computes and initializes the insns' CUIDs.  */
1264169689Skan  alloc_mem ();
1265169689Skan
1266169689Skan  /* We need alias analysis.  */
1267169689Skan  init_alias_analysis ();
1268169689Skan
1269169689Skan  compute_hash_table ();
1270169689Skan
1271169689Skan  if (dump_file)
1272169689Skan    dump_hash_table (dump_file);
1273169689Skan
1274169689Skan  if (htab_elements (expr_table) > 0)
1275169689Skan    {
1276169689Skan      eliminate_partially_redundant_loads ();
1277169689Skan      delete_redundant_insns ();
1278169689Skan
1279169689Skan      if (dump_file)
1280169689Skan	{
1281169689Skan	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
1282169689Skan	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
1283169689Skan	  fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
1284169689Skan	  fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
1285169689Skan	  fprintf (dump_file, "\n\n");
1286169689Skan	}
1287169689Skan    }
1288169689Skan
1289169689Skan  /* We are finished with alias.  */
1290169689Skan  end_alias_analysis ();
1291169689Skan
1292169689Skan  free_mem ();
1293169689Skan}
1294169689Skan
1295169689Skan
1296169689Skanstatic bool
1297169689Skangate_handle_gcse2 (void)
1298169689Skan{
1299169689Skan  return (optimize > 0 && flag_gcse_after_reload);
1300169689Skan}
1301169689Skan
1302169689Skan
/* Execute the pass, then clean up after it.  Note that get_insns () is
   deliberately re-evaluated for each call: the pass may delete insns,
   so the first insn of the chain can change between calls.  */
static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
  return 0;
}
1311169689Skan
/* Pass descriptor for the post-reload GCSE pass ("gcse2"), registered
   with the pass manager.  */
struct tree_opt_pass pass_gcse2 =
{
  "gcse2",                              /* name */
  gate_handle_gcse2,                    /* gate */
  rest_of_handle_gcse2,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GCSE_AFTER_RELOAD,                 /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_verify_flow | TODO_ggc_collect,  /* todo_flags_finish */
  'J'                                   /* letter */
};
1329169689Skan
1330