/* Post reload partially redundant load elimination
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"

#include "hash-table.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "predict.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "profile.h"
#include "hashtab.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "except.h"
#include "intl.h"
#include "obstack.h"
#include "params.h"
#include "target.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "df.h"
#include "gcse-common.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   We perform partially redundant load elimination, trying to eliminate
   redundant loads created by the reload pass.  We look for fully or
   partially redundant loads fed by one or more loads/stores in
   predecessor BBs, and try adding loads to make them fully redundant.
   We also check whether it's worth adding those loads before deleting
   the redundant load.

   Algorithm:
   1. Build an available-expressions hash table:
       For each load/store instruction, if the loaded/stored memory didn't
       change until the end of the basic block, add this memory expression
       to the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
	 Perform partial redundancy elimination; check if it's worth adding
	 loads to make the load fully redundant.  If so, add the loads and
	 register copies and delete the load.
   3. Delete the instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between the load and some store,
     look for some other free register between the load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
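
/* As an illustrative sketch (with made-up hard registers, not taken from
   any particular target), consider a load from [mem] that is redundant on
   only one incoming path:

	BB1: r1 = [mem]		BB2: (no access to [mem])
		\		   /
		 \		  /
		  BB3: r2 = [mem]	<- partially redundant

   Step 2 would insert the load "r2 = [mem]" on the edge BB2->BB3 and the
   register copy "r2 = r1" on the edge BB1->BB3, making the load in BB3
   fully redundant so that step 3 can delete it.  */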


/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly-linked
   list of occurrences.  */

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The cached hash value for this entry.  */
  hashval_t hash;

  /* Index in the transparent bitmaps.  */
  unsigned int bitmap_index;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

/* Hashtable helpers.  */

struct expr_hasher : typed_noop_remove <expr>
{
  typedef expr value_type;
  typedef expr compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};


/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
		   NULL,  /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here, we just return the cached hash value.  */

inline hashval_t
expr_hasher::hash (const value_type *exp)
{
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

inline bool
expr_hasher::equal (const value_type *exp1, const compare_type *exp2)
{
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}

/* The table itself.  */
static hash_table<expr_hasher> *expr_table;


static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx_insn *insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx_insn *insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx_insn *insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, only this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
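
/* For example (with hypothetical UIDs), if a block contains three real
   insns with the UIDs 42, 17 and 30, in that order, they could be mapped
   to the CUIDs 7, 8 and 9.  Comparing CUIDs is thus a cheap way to test
   the relative order of two insns within a block, which is how INSN_CUID
   is used throughout this pass.  */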

/* Bitmap of blocks which have memory stores.  */
static bitmap modify_mem_list_set;

/* Bitmap of blocks which have calls.  */
static bitmap blocks_with_calls;

/* Vector indexed by block # with a list of all the insns that
   modify memory within the block.  */
static vec<rtx_insn *> *modify_mem_list;

/* Vector indexed by block # with a canonicalized list of insns
   that modify memory in the block.  */
static vec<modify_pair> *canon_modify_mem_list;

/* Vector of simple bitmaps indexed by block number.  Each component sbitmap
   indicates which expressions are transparent through the block.  */
static sbitmap *transp;


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
static void record_last_reg_set_info (rtx_insn *, rtx);
static void record_last_reg_set_info_regno (rtx_insn *, int);
static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx_insn *);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static void insert_expr_in_table (rtx, rtx_insn *);
static struct expr *lookup_expr_in_table (rtx);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx_insn *);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);

/* The work horses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
						rtx_insn *,
						struct expr *);
static void eliminate_partially_redundant_loads (void);


/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx_insn *insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
					   sizeof (struct modifies_mem));

  blocks_with_calls = BITMAP_ALLOC (NULL);
  modify_mem_list_set = BITMAP_ALLOC (NULL);

  modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
					      sizeof (vec_rtx_heap));
  canon_modify_mem_list
    = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
					sizeof (vec_modify_pair_heap));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  delete expr_table;
  expr_table = NULL;

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      modify_mem_list[i].release ();
      canon_modify_mem_list[i].release ();
    }

  BITMAP_FREE (blocks_with_calls);
  BITMAP_FREE (modify_mem_list_set);
  free (reg_avail_info);
}


/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx_insn *insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);

  if (! (*slot))
    {
      /* The expression isn't found, so insert it.  */
      *slot = cur_expr;

      /* Anytime we add an entry to the table, record the index
	 of the new entry.  The bitmap index starts counting
	 at zero.  */
      cur_expr->bitmap_index = expr_table->elements () - 1;
    }
  else
    {
      /* The expression is already in the table, so roll back the
	 obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr
	 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
						  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = avail_occr;
      else
	last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}


/* Lookup pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}


/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
int
dump_expr_hash_table_entry (expr **slot, FILE *file)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, exprs->expr);
  fprintf (file, "\nhashcode: %u\n", exprs->hash);
  fprintf (file, "list of occurrences:\n");
  occr = exprs->avail_occr;
  while (occr)
    {
      rtx_insn *insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) expr_table->size (),
	   (long) expr_table->elements (),
	   expr_table->collisions ());
  if (expr_table->elements () > 0)
    {
      fprintf (file, "\n\ntable entries:\n");
      expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
    }
  fprintf (file, "\n");
}

/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_HARD_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */
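
/* For example, hash_scan_set asks whether the operands stay unchanged
   from the candidate insn to the end of its block (AFTER_INSN is true,
   i.e. availability at the block end), while
   eliminate_partially_redundant_loads asks whether they are unchanged
   from the start of the block up to the load (AFTER_INSN is false).  */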

static bool
oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
	return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
	return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
	    return 0;
    }

  return 1;
}


/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
		    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
}


/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx_insn *setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
	   && INSN_CUID (setter) < uid_limit)
	  || (! after_insn
	      && INSN_CUID (setter) > uid_limit))
	{
	  list_entry = list_entry->next;
	  continue;
	}

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.
	 It will set mems_conflict_p to nonzero if there may be a
	 conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (PATTERN (setter), find_mem_conflicts, x);
      if (mems_conflict_p)
	return 1;

      list_entry = list_entry->next;
    }
  return 0;
}


/* Record register first/last/block set information for REG in INSN.  */

static inline void
record_last_reg_set_info (rtx_insn *insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_HARD_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx_insn *insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (rtx_insn *insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
						      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;

  record_last_mem_set_info_common (insn, modify_mem_list,
				   canon_modify_mem_list,
				   modify_mem_list_set,
				   blocks_with_calls);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx_insn *last_set_insn = (rtx_insn *) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
	 clobber the stack pointer though.  Some targets do argument
	 pushes without adding REG_INC notes.  See e.g. PR25196,
	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
	 such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
	record_last_mem_set_info (last_set_insn);
      else
	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}


/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx_insn *insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      rtx link, x;
      hard_reg_set_iterator hrsi;
      EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
	record_last_reg_set_info_regno (insn, regno);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	  {
	    x = XEXP (XEXP (link, 0), 0);
	    if (REG_P (x))
	      {
		gcc_assert (HARD_REGISTER_P (x));
		record_last_reg_set_info (insn, x);
	      }
	  }

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
	record_last_mem_set_info (insn);
    }
}


/* Scan the pattern of INSN and add an entry to the hash TABLE.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (dest))
	  /* Is SET_SRC something we want to gcse?  */
	  && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
	  /* Never consider insns touching the register stack.  It may
	     create situations that reg-stack cannot handle (e.g. a stack
	     register live across an abnormal edge).  */
	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  && oprs_unchanged_p (src, insn, true))
	{
	  insert_expr_in_table (src, insn);
	}
    }
  else if (REG_P (src))
    {
      /* This is a store of a register to memory; record the memory
	 expression if it stays available to the end of the block.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (src))
	  /* Is SET_DEST something we want to gcse?  */
	  && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
	  /* As above for STACK_REGS.  */
	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
	  /* Check if the memory expression is killed after insn.  */
	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
	{
	  insert_expr_in_table (dest, insn);
	}
    }
}


/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */
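
/* As a sketch of what gets recorded, assume a block (with made-up
   registers, where [a] and [b] may alias) containing:

	r8 = [a]
	[b] = r9
	r10 = [a]

   The first load is not recorded, because [a] may change before the end
   of the block; the store to [b] and the second load are recorded, since
   nothing after them modifies the memory they reference.  */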

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      /* First pass over the instructions records information used to
	 determine when registers and memory are last set.
	 Since we compute a "local" AVAIL_OUT, reset the tables that
	 help us keep track of what has been modified since the start
	 of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
	  hash_scan_set (insn);
    }
}


/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   This assumes no such insn can be a CALL_INSN; if that ever changes,
   call reg_used_between_p with PREV (insn), NEXT (insn) instead of
   reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx_insn *insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
	return false;

      if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	return false;

      if (tablejump_p (BB_END (pred->src), NULL, NULL))
	return false;
    }
  return true;
}


/* Search for an occurrence of expression (the list starting at ORIG_OCCR)
   in BB.  If there is none in BB itself, look through chains of single
   predecessors: an occurrence found there is usable if the expression
   (identified by BITMAP_INDEX in the transparency bitmaps) is transparent
   through the intervening blocks and its register is neither modified in
   them nor killed on the edges in between.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
{
  struct occr *occr = orig_occr;

  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;

  /* If we could not find an occurrence in BB, see if BB
     has a single predecessor with an occurrence that is
     transparent through BB.  */
  if (single_pred_p (bb)
      && bitmap_bit_p (transp[bb->index], bitmap_index)
      && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
    {
      rtx avail_reg = get_avail_load_store_reg (occr->insn);
      if (!reg_set_between_p (avail_reg,
			      PREV_INSN (BB_HEAD (bb)),
			      NEXT_INSN (BB_END (bb)))
	  && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
	return occr;
    }

  return NULL;
}


/* This helper is called via htab_traverse.  */
int
compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
{
  struct expr *expr = *slot;

  compute_transp (expr->expr, expr->bitmap_index, transp,
		  blocks_with_calls, modify_mem_list_set,
		  canon_modify_mem_list);
  return 1;
}

/* This handles the case where several loads/stores feed a partially
   redundant load.  It checks if the redundancy elimination is possible
   and if it's worth it.

   Redundancy elimination is possible if:
   1) None of the operands of the insn have been modified since the start
      of the current basic block.
   2) In some predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */
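
/* As a sketch of the profitability checks in the function body, with the
   (assumed) default values of 3 for the gcse-after-reload-partial-fraction
   parameter and 10 for gcse-after-reload-critical-fraction: if the edges
   where the load is already available execute 900 times in total, the
   edges that need a newly inserted load may execute at most 300 times,
   and the critical edges involved at most 90 times, before the
   elimination is given up.  */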

static void
eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
				    struct expr *expr)
{
  edge pred;
  rtx_insn *avail_insn = NULL;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0; /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx_insn *next_pred_bb_end;

      avail_insn = NULL;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      for (a_occr = get_bb_avail_insn (pred_bb,
				       expr->avail_occr,
				       expr->bitmap_index);
	   a_occr;
	   a_occr = get_bb_avail_insn (pred_bb,
				       a_occr->next,
				       expr->bitmap_index))
	{
	  /* Candidate occurrence; check whether its register can be
	     copied to DEST on this edge.  */
	  avail_insn = a_occr->insn;
	  avail_reg = get_avail_load_store_reg (avail_insn);
	  gcc_assert (avail_reg);

	  /* Make sure we can generate a move from register avail_reg to
	     dest.  */
	  rtx_insn *move = as_a <rtx_insn *>
	    (gen_move_insn (copy_rtx (dest), copy_rtx (avail_reg)));
	  extract_insn (move);
	  if (! constrain_operands (1, get_preferred_alternatives (insn,
								   pred_bb))
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
	    {
	      avail_insn = NULL;
	      continue;
	    }
	  next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
	    /* AVAIL_INSN remains non-null.  */
	    break;
	  else
	    avail_insn = NULL;
	}

      if (EDGE_CRITICAL_P (pred))
	critical_count += pred->count;

      if (avail_insn != NULL_RTX)
	{
	  npred_ok++;
	  ok_count += pred->count;
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
	    {
	      /* Check if there is going to be a split.  */
	      if (EDGE_CRITICAL_P (pred))
		critical_edge_split = true;
	    }
	  else /* It's a dead move; no need to generate it.  */
	    continue;
	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						  sizeof (struct unoccr));
	  occr->insn = avail_insn;
	  occr->pred = pred;
	  occr->next = avail_occrs;
	  avail_occrs = occr;
	  if (! rollback_unoccr)
	    rollback_unoccr = occr;
	}
      else
	{
	  /* Adding a load on a critical edge will cause a split.  */
	  if (EDGE_CRITICAL_P (pred))
	    critical_edge_split = true;
	  not_ok_count += pred->count;
	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						    sizeof (struct unoccr));
	  unoccr->insn = NULL;
	  unoccr->pred = pred;
	  unoccr->next = unavail_occrs;
	  unavail_occrs = unoccr;
	  if (! rollback_unoccr)
	    rollback_unoccr = unoccr;
	}
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
	 a critical edge is profitable or not so don't do it.  */
      || ((! profile_info || ! flag_branch_probabilities
	   || targetm.cannot_modify_jumps_p ())
	  && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    goto cleanup;
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
	 memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg)),
			   pred);
      stats.moves_inserted++;

      if (dump_file)
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
	}
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available.  If insn is available it may help
     discover additional redundancies, so mark it for later deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
    ;

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
	{
	  fprintf (dump_file, "deleting insn:\n");
	  print_rtl_single (dump_file, insn);
	  fprintf (dump_file, "\n");
	}
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}

/* Perform the redundancy elimination as described before.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx_insn *insn;
  basic_block bb;

  /* Note we skip the first real basic block.  */

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  FOR_BB_BETWEEN (bb,
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun),
		  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
	continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
	continue;

      /* Reset the table of things changed since the start of the current
	 basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
	 any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
	{
	  /* Is it a load - of the form (set (reg) (mem))?  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SET
	      && REG_P (SET_DEST (PATTERN (insn)))
	      && MEM_P (SET_SRC (PATTERN (insn))))
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (!MEM_VOLATILE_P (src)
		  && GET_MODE (src) != BLKmode
		  && general_operand (src, GET_MODE (src))
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_unchanged_p (src, insn, false)
		  && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
		  && !side_effects_p (src)
		  /* Is the expression recorded?  */
		  && (expr = lookup_expr_in_table (src)) != NULL)
		{
		  /* We now have a load (insn) and an available memory at
		     its BB start (expr).  Try to remove the load if it is
		     redundant.  */
		  eliminate_partially_redundant_load (bb, insn, expr);
		}
	    }

	  /* Keep track of everything modified by this insn, so that we
	     know what has been modified since the start of the current
	     basic block.  */
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
int
delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
	{
	  delete_insn (occr->insn);
	  stats.insns_deleted++;

	  if (dump_file)
	    {
	      fprintf (dump_file, "deleting insn:\n");
	      print_rtl_single (dump_file, occr->insn);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE after reload - clean up some redundant loads
   due to spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (expr_table->elements () > 0)
    {
      /* Knowing which MEMs are transparent through a block can significantly
	 increase the number of redundant loads found.  So compute transparency
	 information for each memory expression in the hash table.  */
      df_analyze ();
      /* This cannot be part of the normal allocation routine because
	 we have to know the number of elements in the hash table.  */
      transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				     expr_table->elements ());
      bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
      expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();
      sbitmap_vector_free (transp);

      if (dump_file)
	{
	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
	  fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
	  fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
	  fprintf (dump_file, "\n\n");
	}

      statistics_counter_event (cfun, "copies inserted",
				stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
				stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
				stats.insns_deleted);
    }

  /* We are finished with alias.  */
  end_alias_analysis ();

  free_mem ();
}



static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

namespace {

const pass_data pass_data_gcse2 =
{
  RTL_PASS, /* type */
  "gcse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GCSE_AFTER_RELOAD, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_gcse2 : public rtl_opt_pass
{
public:
  pass_gcse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_gcse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      return (optimize > 0 && flag_gcse_after_reload
	      && optimize_function_for_speed_p (fun));
    }

  virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }

}; // class pass_gcse2

} // anon namespace

rtl_opt_pass *
make_pass_gcse2 (gcc::context *ctxt)
{
  return new pass_gcse2 (ctxt);
}