1/* Common subexpression elimination for GNU compiler.
2   Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24#include "rtl.h"
25#include "tm_p.h"
26#include "hard-reg-set.h"
27#include "regs.h"
28#include "predict.h"
29#include "vec.h"
30#include "hashtab.h"
31#include "hash-set.h"
32#include "machmode.h"
33#include "input.h"
34#include "function.h"
35#include "dominance.h"
36#include "cfg.h"
37#include "cfgrtl.h"
38#include "cfganal.h"
39#include "cfgcleanup.h"
40#include "basic-block.h"
41#include "flags.h"
42#include "insn-config.h"
43#include "recog.h"
44#include "symtab.h"
45#include "statistics.h"
46#include "double-int.h"
47#include "real.h"
48#include "fixed-value.h"
49#include "alias.h"
50#include "wide-int.h"
51#include "inchash.h"
52#include "tree.h"
53#include "expmed.h"
54#include "dojump.h"
55#include "explow.h"
56#include "calls.h"
57#include "emit-rtl.h"
58#include "varasm.h"
59#include "stmt.h"
60#include "expr.h"
61#include "diagnostic-core.h"
62#include "toplev.h"
63#include "ggc.h"
64#include "except.h"
65#include "target.h"
66#include "params.h"
67#include "rtlhooks-def.h"
68#include "tree-pass.h"
69#include "df.h"
70#include "dbgcnt.h"
71#include "rtl-iter.h"
72
73/* The basic idea of common subexpression elimination is to go
74   through the code, keeping a record of expressions that would
75   have the same value at the current scan point, and replacing
76   expressions encountered with the cheapest equivalent expression.
77
78   It is too complicated to keep track of the different possibilities
79   when control paths merge in this code; so, at each label, we forget all
80   that is known and start fresh.  This can be described as processing each
81   extended basic block separately.  We have a separate pass to perform
82   global CSE.
83
84   Note CSE can turn a conditional or computed jump into a nop or
85   an unconditional jump.  When this occurs we arrange to run the jump
86   optimizer after CSE to delete the unreachable code.
87
88   We use two data structures to record the equivalent expressions:
89   a hash table for most expressions, and a vector of "quantity
90   numbers" to record equivalent (pseudo) registers.
91
92   The use of the special data structure for registers is desirable
93   because it is faster.  It is possible because registers references
94   contain a fairly small number, the register number, taken from
95   a contiguously allocated series, and two register references are
96   identical if they have the same number.  General expressions
97   do not have any such thing, so the only way to retrieve the
98   information recorded on an expression other than a register
99   is to keep it in a hash table.
100
101Registers and "quantity numbers":
102
103   At the start of each basic block, all of the (hardware and pseudo)
104   registers used in the function are given distinct quantity
105   numbers to indicate their contents.  During scan, when the code
106   copies one register into another, we copy the quantity number.
107   When a register is loaded in any other way, we allocate a new
108   quantity number to describe the value generated by this operation.
109   `REG_QTY (N)' records what quantity register N is currently thought
110   of as containing.
111
112   All real quantity numbers are greater than or equal to zero.
113   If register N has not been assigned a quantity, `REG_QTY (N)' will
114   equal -N - 1, which is always negative.
115
116   Quantity numbers below zero do not exist and none of the `qty_table'
117   entries should be referenced with a negative index.
118
119   We also maintain a bidirectional chain of registers for each
120   quantity number.  The `qty_table` members `first_reg' and `last_reg',
121   and `reg_eqv_table' members `next' and `prev' hold these chains.
122
123   The first register in a chain is the one whose lifespan is least local.
124   Among equals, it is the one that was seen first.
125   We replace any equivalent register with that one.
126
127   If two registers have the same quantity number, it must be true that
128   REG expressions with qty_table `mode' must be in the hash table for both
129   registers and must be in the same class.
130
131   The converse is not true.  Since hard registers may be referenced in
132   any mode, two REG expressions might be equivalent in the hash table
133   but not have the same quantity number if the quantity number of one
134   of the registers is not the same mode as those expressions.
135
136Constants and quantity numbers
137
138   When a quantity has a known constant value, that value is stored
139   in the appropriate qty_table `const_rtx'.  This is in addition to
140   putting the constant in the hash table as is usual for non-regs.
141
142   Whether a reg or a constant is preferred is determined by the configuration
143   macro CONST_COSTS and will often depend on the constant value.  In any
144   event, expressions containing constants can be simplified, by fold_rtx.
145
146   When a quantity has a known nearly constant value (such as an address
147   of a stack slot), that value is stored in the appropriate qty_table
148   `const_rtx'.
149
150   Integer constants don't have a machine mode.  However, cse
151   determines the intended machine mode from the destination
152   of the instruction that moves the constant.  The machine mode
153   is recorded in the hash table along with the actual RTL
154   constant expression so that different modes are kept separate.
155
156Other expressions:
157
158   To record known equivalences among expressions in general
159   we use a hash table called `table'.  It has a fixed number of buckets
160   that contain chains of `struct table_elt' elements for expressions.
161   These chains connect the elements whose expressions have the same
162   hash codes.
163
164   Other chains through the same elements connect the elements which
165   currently have equivalent values.
166
167   Register references in an expression are canonicalized before hashing
168   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
169   The hash code of a register reference is computed using the quantity
170   number, not the register number.
171
172   When the value of an expression changes, it is necessary to remove from the
173   hash table not just that expression but all expressions whose values
174   could be different as a result.
175
176     1. If the value changing is in memory, except in special cases
177     ANYTHING referring to memory could be changed.  That is because
178     nobody knows where a pointer does not point.
179     The function `invalidate_memory' removes what is necessary.
180
181     The special cases are when the address is constant or is
182     a constant plus a fixed register such as the frame pointer
183     or a static chain pointer.  When such addresses are stored in,
184     we can tell exactly which other such addresses must be invalidated
185     due to overlap.  `invalidate' does this.
186     All expressions that refer to non-constant
187     memory addresses are also invalidated.  `invalidate_memory' does this.
188
189     2. If the value changing is a register, all expressions
190     containing references to that register, and only those,
191     must be removed.
192
193   Because searching the entire hash table for expressions that contain
194   a register is very slow, we try to figure out when it isn't necessary.
195   Precisely, this is necessary only when expressions have been
196   entered in the hash table using this register, and then the value has
197   changed, and then another expression wants to be added to refer to
198   the register's new value.  This sequence of circumstances is rare
199   within any one basic block.
200
201   `REG_TICK' and `REG_IN_TABLE', accessors for members of
202   cse_reg_info, are used to detect this case.  REG_TICK (i) is
203   incremented whenever a value is stored in register i.
204   REG_IN_TABLE (i) holds -1 if no references to register i have been
205   entered in the table; otherwise, it contains the value REG_TICK (i)
206   had when the references were entered.  If we want to enter a
207   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
208   remove old references.  Until we want to enter a new entry, the
209   mere fact that the two vectors don't match makes the entries be
210   ignored if anyone tries to match them.
211
212   Registers themselves are entered in the hash table as well as in
213   the equivalent-register chains.  However, `REG_TICK' and
214   `REG_IN_TABLE' do not apply to expressions which are simple
215   register references.  These expressions are removed from the table
216   immediately when they become invalid, and this can be done even if
217   we do not immediately search for all the expressions that refer to
218   the register.
219
220   A CLOBBER rtx in an instruction invalidates its operand for further
221   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
222   invalidates everything that resides in memory.
223
224Related expressions:
225
226   Constant expressions that differ only by an additive integer
227   are called related.  When a constant expression is put in
228   the table, the related expression with no constant term
229   is also entered.  These are made to point at each other
230   so that it is possible to find out if there exists any
231   register equivalent to an expression related to a given expression.  */
232
233/* Length of qty_table vector.  We know in advance we will not need
234   a quantity number this big.  */
235
236static int max_qty;
237
238/* Next quantity number to be allocated.
239   This is 1 + the largest number needed so far.  */
240
241static int next_qty;
242
243/* Per-qty information tracking.
244
245   `first_reg' and `last_reg' track the head and tail of the
246   chain of registers which currently contain this quantity.
247
248   `mode' contains the machine mode of this quantity.
249
250   `const_rtx' holds the rtx of the constant value of this
251   quantity, if known.  A summations of the frame/arg pointer
252   and a constant can also be entered here.  When this holds
253   a known value, `const_insn' is the insn which stored the
254   constant value.
255
256   `comparison_{code,const,qty}' are used to track when a
257   comparison between a quantity and some constant or register has
258   been passed.  In such a case, we know the results of the comparison
259   in case we see it again.  These members record a comparison that
260   is known to be true.  `comparison_code' holds the rtx code of such
261   a comparison, else it is set to UNKNOWN and the other two
262   comparison members are undefined.  `comparison_const' holds
263   the constant being compared against, or zero if the comparison
264   is not against a constant.  `comparison_qty' holds the quantity
265   being compared against when the result is known.  If the comparison
266   is not with a register, `comparison_qty' is -1.  */
267
268struct qty_table_elem
269{
270  rtx const_rtx;
271  rtx_insn *const_insn;
272  rtx comparison_const;
273  int comparison_qty;
274  unsigned int first_reg, last_reg;
275  /* The sizes of these fields should match the sizes of the
276     code and mode fields of struct rtx_def (see rtl.h).  */
277  ENUM_BITFIELD(rtx_code) comparison_code : 16;
278  ENUM_BITFIELD(machine_mode) mode : 8;
279};
280
281/* The table of all qtys, indexed by qty number.  */
282static struct qty_table_elem *qty_table;
283
284#ifdef HAVE_cc0
285/* For machines that have a CC0, we do not record its value in the hash
286   table since its use is guaranteed to be the insn immediately following
287   its definition and any other insn is presumed to invalidate it.
288
289   Instead, we store below the current and last value assigned to CC0.
290   If it should happen to be a constant, it is stored in preference
291   to the actual assigned value.  In case it is a constant, we store
292   the mode in which the constant should be interpreted.  */
293
294static rtx this_insn_cc0, prev_insn_cc0;
295static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
296#endif
297
298/* Insn being scanned.  */
299
300static rtx_insn *this_insn;
301static bool optimize_this_for_speed_p;
302
303/* Index by register number, gives the number of the next (or
304   previous) register in the chain of registers sharing the same
305   value.
306
307   Or -1 if this register is at the end of the chain.
308
309   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */
310
311/* Per-register equivalence chain.  */
312struct reg_eqv_elem
313{
314  int next, prev;
315};
316
317/* The table of all register equivalence chains.  */
318static struct reg_eqv_elem *reg_eqv_table;
319
320struct cse_reg_info
321{
322  /* The timestamp at which this register is initialized.  */
323  unsigned int timestamp;
324
325  /* The quantity number of the register's current contents.  */
326  int reg_qty;
327
328  /* The number of times the register has been altered in the current
329     basic block.  */
330  int reg_tick;
331
332  /* The REG_TICK value at which rtx's containing this register are
333     valid in the hash table.  If this does not equal the current
334     reg_tick value, such expressions existing in the hash table are
335     invalid.  */
336  int reg_in_table;
337
338  /* The SUBREG that was set when REG_TICK was last incremented.  Set
339     to -1 if the last store was to the whole register, not a subreg.  */
340  unsigned int subreg_ticked;
341};
342
343/* A table of cse_reg_info indexed by register numbers.  */
344static struct cse_reg_info *cse_reg_info_table;
345
346/* The size of the above table.  */
347static unsigned int cse_reg_info_table_size;
348
349/* The index of the first entry that has not been initialized.  */
350static unsigned int cse_reg_info_table_first_uninitialized;
351
352/* The timestamp at the beginning of the current run of
353   cse_extended_basic_block.  We increment this variable at the beginning of
354   the current run of cse_extended_basic_block.  The timestamp field of a
355   cse_reg_info entry matches the value of this variable if and only
356   if the entry has been initialized during the current run of
357   cse_extended_basic_block.  */
358static unsigned int cse_reg_info_timestamp;
359
360/* A HARD_REG_SET containing all the hard registers for which there is
361   currently a REG expression in the hash table.  Note the difference
362   from the above variables, which indicate if the REG is mentioned in some
363   expression in the table.  */
364
365static HARD_REG_SET hard_regs_in_table;
366
367/* True if CSE has altered the CFG.  */
368static bool cse_cfg_altered;
369
370/* True if CSE has altered conditional jump insns in such a way
371   that jump optimization should be redone.  */
372static bool cse_jumps_altered;
373
374/* True if we put a LABEL_REF into the hash table for an INSN
375   without a REG_LABEL_OPERAND, we have to rerun jump after CSE
376   to put in the note.  */
377static bool recorded_label_ref;
378
379/* canon_hash stores 1 in do_not_record
380   if it notices a reference to CC0, PC, or some other volatile
381   subexpression.  */
382
383static int do_not_record;
384
385/* canon_hash stores 1 in hash_arg_in_memory
386   if it notices a reference to memory within the expression being hashed.  */
387
388static int hash_arg_in_memory;
389
390/* The hash table contains buckets which are chains of `struct table_elt's,
391   each recording one expression's information.
392   That expression is in the `exp' field.
393
394   The canon_exp field contains a canonical (from the point of view of
395   alias analysis) version of the `exp' field.
396
397   Those elements with the same hash code are chained in both directions
398   through the `next_same_hash' and `prev_same_hash' fields.
399
400   Each set of expressions with equivalent values
401   are on a two-way chain through the `next_same_value'
402   and `prev_same_value' fields, and all point with
403   the `first_same_value' field at the first element in
404   that chain.  The chain is in order of increasing cost.
405   Each element's cost value is in its `cost' field.
406
407   The `in_memory' field is nonzero for elements that
408   involve any reference to memory.  These elements are removed
409   whenever a write is done to an unidentified location in memory.
410   To be safe, we assume that a memory address is unidentified unless
411   the address is either a symbol constant or a constant plus
412   the frame pointer or argument pointer.
413
414   The `related_value' field is used to connect related expressions
415   (that differ by adding an integer).
416   The related expressions are chained in a circular fashion.
417   `related_value' is zero for expressions for which this
418   chain is not useful.
419
420   The `cost' field stores the cost of this element's expression.
421   The `regcost' field stores the value returned by approx_reg_cost for
422   this element's expression.
423
424   The `is_const' flag is set if the element is a constant (including
425   a fixed address).
426
427   The `flag' field is used as a temporary during some search routines.
428
429   The `mode' field is usually the same as GET_MODE (`exp'), but
430   if `exp' is a CONST_INT and has no machine mode then the `mode'
431   field is the mode it was being used as.  Each constant is
432   recorded separately for each mode it is used with.  */
433
434struct table_elt
435{
436  rtx exp;
437  rtx canon_exp;
438  struct table_elt *next_same_hash;
439  struct table_elt *prev_same_hash;
440  struct table_elt *next_same_value;
441  struct table_elt *prev_same_value;
442  struct table_elt *first_same_value;
443  struct table_elt *related_value;
444  int cost;
445  int regcost;
446  /* The size of this field should match the size
447     of the mode field of struct rtx_def (see rtl.h).  */
448  ENUM_BITFIELD(machine_mode) mode : 8;
449  char in_memory;
450  char is_const;
451  char flag;
452};
453
454/* We don't want a lot of buckets, because we rarely have very many
455   things stored in the hash table, and a lot of buckets slows
456   down a lot of loops that happen frequently.  */
457#define HASH_SHIFT	5
458#define HASH_SIZE	(1 << HASH_SHIFT)
459#define HASH_MASK	(HASH_SIZE - 1)
460
461/* Compute hash code of X in mode M.  Special-case case where X is a pseudo
462   register (hard registers may require `do_not_record' to be set).  */
463
464#define HASH(X, M)	\
465 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
466  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
467  : canon_hash (X, M)) & HASH_MASK)
468
469/* Like HASH, but without side-effects.  */
470#define SAFE_HASH(X, M)	\
471 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
472  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
473  : safe_hash (X, M)) & HASH_MASK)
474
475/* Determine whether register number N is considered a fixed register for the
476   purpose of approximating register costs.
477   It is desirable to replace other regs with fixed regs, to reduce need for
478   non-fixed hard regs.
479   A reg wins if it is either the frame pointer or designated as fixed.  */
480#define FIXED_REGNO_P(N)  \
481  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
482   || fixed_regs[N] || global_regs[N])
483
484/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
485   hard registers and pointers into the frame are the cheapest with a cost
486   of 0.  Next come pseudos with a cost of one and other hard registers with
487   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
488
489#define CHEAP_REGNO(N)							\
490  (REGNO_PTR_FRAME_P (N)						\
491   || (HARD_REGISTER_NUM_P (N)						\
492       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
493
494#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
495#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))
496
497/* Get the number of times this register has been updated in this
498   basic block.  */
499
500#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
501
502/* Get the point at which REG was recorded in the table.  */
503
504#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
505
506/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
507   SUBREG).  */
508
509#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
510
511/* Get the quantity number for REG.  */
512
513#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
514
515/* Determine if the quantity number for register X represents a valid index
516   into the qty_table.  */
517
518#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
519
520/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */
521
522#define CHEAPER(X, Y) \
523 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
524
525static struct table_elt *table[HASH_SIZE];
526
527/* Chain of `struct table_elt's made so far for this function
528   but currently removed from the table.  */
529
530static struct table_elt *free_element_chain;
531
532/* Set to the cost of a constant pool reference if one was found for a
533   symbolic constant.  If this was found, it means we should try to
534   convert constants into constant pool entries if they don't fit in
535   the insn.  */
536
537static int constant_pool_entries_cost;
538static int constant_pool_entries_regcost;
539
540/* Trace a patch through the CFG.  */
541
542struct branch_path
543{
544  /* The basic block for this path entry.  */
545  basic_block bb;
546};
547
548/* This data describes a block that will be processed by
549   cse_extended_basic_block.  */
550
551struct cse_basic_block_data
552{
553  /* Total number of SETs in block.  */
554  int nsets;
555  /* Size of current branch path, if any.  */
556  int path_size;
557  /* Current path, indicating which basic_blocks will be processed.  */
558  struct branch_path *path;
559};
560
561
562/* Pointers to the live in/live out bitmaps for the boundaries of the
563   current EBB.  */
564static bitmap cse_ebb_live_in, cse_ebb_live_out;
565
566/* A simple bitmap to track which basic blocks have been visited
567   already as part of an already processed extended basic block.  */
568static sbitmap cse_visited_basic_blocks;
569
570static bool fixed_base_plus_p (rtx x);
571static int notreg_cost (rtx, enum rtx_code, int);
572static int preferable (int, int, int, int);
573static void new_basic_block (void);
574static void make_new_qty (unsigned int, machine_mode);
575static void make_regs_eqv (unsigned int, unsigned int);
576static void delete_reg_equiv (unsigned int);
577static int mention_regs (rtx);
578static int insert_regs (rtx, struct table_elt *, int);
579static void remove_from_table (struct table_elt *, unsigned);
580static void remove_pseudo_from_table (rtx, unsigned);
581static struct table_elt *lookup (rtx, unsigned, machine_mode);
582static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
583static rtx lookup_as_function (rtx, enum rtx_code);
584static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
585					    machine_mode, int, int);
586static struct table_elt *insert (rtx, struct table_elt *, unsigned,
587				 machine_mode);
588static void merge_equiv_classes (struct table_elt *, struct table_elt *);
589static void invalidate (rtx, machine_mode);
590static void remove_invalid_refs (unsigned int);
591static void remove_invalid_subreg_refs (unsigned int, unsigned int,
592					machine_mode);
593static void rehash_using_reg (rtx);
594static void invalidate_memory (void);
595static void invalidate_for_call (void);
596static rtx use_related_value (rtx, struct table_elt *);
597
598static inline unsigned canon_hash (rtx, machine_mode);
599static inline unsigned safe_hash (rtx, machine_mode);
600static inline unsigned hash_rtx_string (const char *);
601
602static rtx canon_reg (rtx, rtx_insn *);
603static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
604					   machine_mode *,
605					   machine_mode *);
606static rtx fold_rtx (rtx, rtx_insn *);
607static rtx equiv_constant (rtx);
608static void record_jump_equiv (rtx_insn *, bool);
609static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
610			      int);
611static void cse_insn (rtx_insn *);
612static void cse_prescan_path (struct cse_basic_block_data *);
613static void invalidate_from_clobbers (rtx_insn *);
614static void invalidate_from_sets_and_clobbers (rtx_insn *);
615static rtx cse_process_notes (rtx, rtx, bool *);
616static void cse_extended_basic_block (struct cse_basic_block_data *);
617extern void dump_class (struct table_elt*);
618static void get_cse_reg_info_1 (unsigned int regno);
619static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
620
621static void flush_hash_table (void);
622static bool insn_live_p (rtx_insn *, int *);
623static bool set_live_p (rtx, rtx_insn *, int *);
624static void cse_change_cc_mode_insn (rtx_insn *, rtx);
625static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
626static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
627				       bool);
628
629
630#undef RTL_HOOKS_GEN_LOWPART
631#define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible
632
633static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
634
635/* Nonzero if X has the form (PLUS frame-pointer integer).  */
636
637static bool
638fixed_base_plus_p (rtx x)
639{
640  switch (GET_CODE (x))
641    {
642    case REG:
643      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
644	return true;
645      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
646	return true;
647      return false;
648
649    case PLUS:
650      if (!CONST_INT_P (XEXP (x, 1)))
651	return false;
652      return fixed_base_plus_p (XEXP (x, 0));
653
654    default:
655      return false;
656    }
657}
658
659/* Dump the expressions in the equivalence class indicated by CLASSP.
660   This function is used only for debugging.  */
661DEBUG_FUNCTION void
662dump_class (struct table_elt *classp)
663{
664  struct table_elt *elt;
665
666  fprintf (stderr, "Equivalence chain for ");
667  print_rtl (stderr, classp->exp);
668  fprintf (stderr, ": \n");
669
670  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
671    {
672      print_rtl (stderr, elt->exp);
673      fprintf (stderr, "\n");
674    }
675}
676
677/* Return an estimate of the cost of the registers used in an rtx.
678   This is mostly the number of different REG expressions in the rtx;
679   however for some exceptions like fixed registers we use a cost of
680   0.  If any other hard register reference occurs, return MAX_COST.  */
681
682static int
683approx_reg_cost (const_rtx x)
684{
685  int cost = 0;
686  subrtx_iterator::array_type array;
687  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
688    {
689      const_rtx x = *iter;
690      if (REG_P (x))
691	{
692	  unsigned int regno = REGNO (x);
693	  if (!CHEAP_REGNO (regno))
694	    {
695	      if (regno < FIRST_PSEUDO_REGISTER)
696		{
697		  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
698		    return MAX_COST;
699		  cost += 2;
700		}
701	      else
702		cost += 1;
703	    }
704	}
705    }
706  return cost;
707}
708
709/* Return a negative value if an rtx A, whose costs are given by COST_A
710   and REGCOST_A, is more desirable than an rtx B.
711   Return a positive value if A is less desirable, or 0 if the two are
712   equally good.  */
713static int
714preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
715{
716  /* First, get rid of cases involving expressions that are entirely
717     unwanted.  */
718  if (cost_a != cost_b)
719    {
720      if (cost_a == MAX_COST)
721	return 1;
722      if (cost_b == MAX_COST)
723	return -1;
724    }
725
726  /* Avoid extending lifetimes of hardregs.  */
727  if (regcost_a != regcost_b)
728    {
729      if (regcost_a == MAX_COST)
730	return 1;
731      if (regcost_b == MAX_COST)
732	return -1;
733    }
734
735  /* Normal operation costs take precedence.  */
736  if (cost_a != cost_b)
737    return cost_a - cost_b;
738  /* Only if these are identical consider effects on register pressure.  */
739  if (regcost_a != regcost_b)
740    return regcost_a - regcost_b;
741  return 0;
742}
743
744/* Internal function, to compute cost when X is not a register; called
745   from COST macro to keep it simple.  */
746
747static int
748notreg_cost (rtx x, enum rtx_code outer, int opno)
749{
750  return ((GET_CODE (x) == SUBREG
751	   && REG_P (SUBREG_REG (x))
752	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
753	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
754	   && (GET_MODE_SIZE (GET_MODE (x))
755	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
756	   && subreg_lowpart_p (x)
757	   && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
758					     GET_MODE (SUBREG_REG (x))))
759	  ? 0
760	  : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
761}
762
763
764/* Initialize CSE_REG_INFO_TABLE.  */
765
766static void
767init_cse_reg_info (unsigned int nregs)
768{
769  /* Do we need to grow the table?  */
770  if (nregs > cse_reg_info_table_size)
771    {
772      unsigned int new_size;
773
774      if (cse_reg_info_table_size < 2048)
775	{
776	  /* Compute a new size that is a power of 2 and no smaller
777	     than the large of NREGS and 64.  */
778	  new_size = (cse_reg_info_table_size
779		      ? cse_reg_info_table_size : 64);
780
781	  while (new_size < nregs)
782	    new_size *= 2;
783	}
784      else
785	{
786	  /* If we need a big table, allocate just enough to hold
787	     NREGS registers.  */
788	  new_size = nregs;
789	}
790
791      /* Reallocate the table with NEW_SIZE entries.  */
792      free (cse_reg_info_table);
793      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
794      cse_reg_info_table_size = new_size;
795      cse_reg_info_table_first_uninitialized = 0;
796    }
797
798  /* Do we have all of the first NREGS entries initialized?  */
799  if (cse_reg_info_table_first_uninitialized < nregs)
800    {
801      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
802      unsigned int i;
803
804      /* Put the old timestamp on newly allocated entries so that they
805	 will all be considered out of date.  We do not touch those
806	 entries beyond the first NREGS entries to be nice to the
807	 virtual memory.  */
808      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
809	cse_reg_info_table[i].timestamp = old_timestamp;
810
811      cse_reg_info_table_first_uninitialized = nregs;
812    }
813}
814
815/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */
816
817static void
818get_cse_reg_info_1 (unsigned int regno)
819{
820  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
821     entry will be considered to have been initialized.  */
822  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
823
824  /* Initialize the rest of the entry.  */
825  cse_reg_info_table[regno].reg_tick = 1;
826  cse_reg_info_table[regno].reg_in_table = -1;
827  cse_reg_info_table[regno].subreg_ticked = -1;
828  cse_reg_info_table[regno].reg_qty = -regno - 1;
829}
830
831/* Find a cse_reg_info entry for REGNO.  */
832
833static inline struct cse_reg_info *
834get_cse_reg_info (unsigned int regno)
835{
836  struct cse_reg_info *p = &cse_reg_info_table[regno];
837
838  /* If this entry has not been initialized, go ahead and initialize
839     it.  */
840  if (p->timestamp != cse_reg_info_timestamp)
841    get_cse_reg_info_1 (regno);
842
843  return p;
844}
845
846/* Clear the hash table and initialize each register with its own quantity,
847   for a new basic block.  */
848
849static void
850new_basic_block (void)
851{
852  int i;
853
854  next_qty = 0;
855
856  /* Invalidate cse_reg_info_table.  */
857  cse_reg_info_timestamp++;
858
859  /* Clear out hash table state for this pass.  */
860  CLEAR_HARD_REG_SET (hard_regs_in_table);
861
862  /* The per-quantity values used to be initialized here, but it is
863     much faster to initialize each as it is made in `make_new_qty'.  */
864
865  for (i = 0; i < HASH_SIZE; i++)
866    {
867      struct table_elt *first;
868
869      first = table[i];
870      if (first != NULL)
871	{
872	  struct table_elt *last = first;
873
874	  table[i] = NULL;
875
876	  while (last->next_same_hash != NULL)
877	    last = last->next_same_hash;
878
879	  /* Now relink this hash entire chain into
880	     the free element list.  */
881
882	  last->next_same_hash = free_element_chain;
883	  free_element_chain = first;
884	}
885    }
886
887#ifdef HAVE_cc0
888  prev_insn_cc0 = 0;
889#endif
890}
891
892/* Say that register REG contains a quantity in mode MODE not in any
893   register before and initialize that quantity.  */
894
895static void
896make_new_qty (unsigned int reg, machine_mode mode)
897{
898  int q;
899  struct qty_table_elem *ent;
900  struct reg_eqv_elem *eqv;
901
902  gcc_assert (next_qty < max_qty);
903
904  q = REG_QTY (reg) = next_qty++;
905  ent = &qty_table[q];
906  ent->first_reg = reg;
907  ent->last_reg = reg;
908  ent->mode = mode;
909  ent->const_rtx = ent->const_insn = NULL;
910  ent->comparison_code = UNKNOWN;
911
912  eqv = &reg_eqv_table[reg];
913  eqv->next = eqv->prev = -1;
914}
915
916/* Make reg NEW equivalent to reg OLD.
917   OLD is not changing; NEW is.  */
918
919static void
920make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
921{
922  unsigned int lastr, firstr;
923  int q = REG_QTY (old_reg);
924  struct qty_table_elem *ent;
925
926  ent = &qty_table[q];
927
928  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
929  gcc_assert (REGNO_QTY_VALID_P (old_reg));
930
931  REG_QTY (new_reg) = q;
932  firstr = ent->first_reg;
933  lastr = ent->last_reg;
934
935  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
936     hard regs.  Among pseudos, if NEW will live longer than any other reg
937     of the same qty, and that is beyond the current basic block,
938     make it the new canonical replacement for this qty.  */
939  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
940      /* Certain fixed registers might be of the class NO_REGS.  This means
941	 that not only can they not be allocated by the compiler, but
942	 they cannot be used in substitutions or canonicalizations
943	 either.  */
944      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
945      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
946	  || (new_reg >= FIRST_PSEUDO_REGISTER
947	      && (firstr < FIRST_PSEUDO_REGISTER
948		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
949		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
950		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
951		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
952    {
953      reg_eqv_table[firstr].prev = new_reg;
954      reg_eqv_table[new_reg].next = firstr;
955      reg_eqv_table[new_reg].prev = -1;
956      ent->first_reg = new_reg;
957    }
958  else
959    {
960      /* If NEW is a hard reg (known to be non-fixed), insert at end.
961	 Otherwise, insert before any non-fixed hard regs that are at the
962	 end.  Registers of class NO_REGS cannot be used as an
963	 equivalent for anything.  */
964      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
965	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
966	     && new_reg >= FIRST_PSEUDO_REGISTER)
967	lastr = reg_eqv_table[lastr].prev;
968      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
969      if (reg_eqv_table[lastr].next >= 0)
970	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
971      else
972	qty_table[q].last_reg = new_reg;
973      reg_eqv_table[lastr].next = new_reg;
974      reg_eqv_table[new_reg].prev = lastr;
975    }
976}
977
978/* Remove REG from its equivalence class.  */
979
980static void
981delete_reg_equiv (unsigned int reg)
982{
983  struct qty_table_elem *ent;
984  int q = REG_QTY (reg);
985  int p, n;
986
987  /* If invalid, do nothing.  */
988  if (! REGNO_QTY_VALID_P (reg))
989    return;
990
991  ent = &qty_table[q];
992
993  p = reg_eqv_table[reg].prev;
994  n = reg_eqv_table[reg].next;
995
996  if (n != -1)
997    reg_eqv_table[n].prev = p;
998  else
999    ent->last_reg = p;
1000  if (p != -1)
1001    reg_eqv_table[p].next = n;
1002  else
1003    ent->first_reg = n;
1004
1005  REG_QTY (reg) = -reg - 1;
1006}
1007
1008/* Remove any invalid expressions from the hash table
1009   that refer to any of the registers contained in expression X.
1010
1011   Make sure that newly inserted references to those registers
1012   as subexpressions will be considered valid.
1013
1014   mention_regs is not called when a register itself
1015   is being stored in the table.
1016
1017   Return 1 if we have done something that may have changed the hash code
1018   of X.  */
1019
1020static int
1021mention_regs (rtx x)
1022{
1023  enum rtx_code code;
1024  int i, j;
1025  const char *fmt;
1026  int changed = 0;
1027
1028  if (x == 0)
1029    return 0;
1030
1031  code = GET_CODE (x);
1032  if (code == REG)
1033    {
1034      unsigned int regno = REGNO (x);
1035      unsigned int endregno = END_REGNO (x);
1036      unsigned int i;
1037
1038      for (i = regno; i < endregno; i++)
1039	{
1040	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1041	    remove_invalid_refs (i);
1042
1043	  REG_IN_TABLE (i) = REG_TICK (i);
1044	  SUBREG_TICKED (i) = -1;
1045	}
1046
1047      return 0;
1048    }
1049
1050  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1051     pseudo if they don't use overlapping words.  We handle only pseudos
1052     here for simplicity.  */
1053  if (code == SUBREG && REG_P (SUBREG_REG (x))
1054      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1055    {
1056      unsigned int i = REGNO (SUBREG_REG (x));
1057
1058      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1059	{
1060	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1061	     the last store to this register really stored into this
1062	     subreg, then remove the memory of this subreg.
1063	     Otherwise, remove any memory of the entire register and
1064	     all its subregs from the table.  */
1065	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1066	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1067	    remove_invalid_refs (i);
1068	  else
1069	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1070	}
1071
1072      REG_IN_TABLE (i) = REG_TICK (i);
1073      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1074      return 0;
1075    }
1076
1077  /* If X is a comparison or a COMPARE and either operand is a register
1078     that does not have a quantity, give it one.  This is so that a later
1079     call to record_jump_equiv won't cause X to be assigned a different
1080     hash code and not found in the table after that call.
1081
1082     It is not necessary to do this here, since rehash_using_reg can
1083     fix up the table later, but doing this here eliminates the need to
1084     call that expensive function in the most common case where the only
1085     use of the register is in the comparison.  */
1086
1087  if (code == COMPARE || COMPARISON_P (x))
1088    {
1089      if (REG_P (XEXP (x, 0))
1090	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1091	if (insert_regs (XEXP (x, 0), NULL, 0))
1092	  {
1093	    rehash_using_reg (XEXP (x, 0));
1094	    changed = 1;
1095	  }
1096
1097      if (REG_P (XEXP (x, 1))
1098	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1099	if (insert_regs (XEXP (x, 1), NULL, 0))
1100	  {
1101	    rehash_using_reg (XEXP (x, 1));
1102	    changed = 1;
1103	  }
1104    }
1105
1106  fmt = GET_RTX_FORMAT (code);
1107  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1108    if (fmt[i] == 'e')
1109      changed |= mention_regs (XEXP (x, i));
1110    else if (fmt[i] == 'E')
1111      for (j = 0; j < XVECLEN (x, i); j++)
1112	changed |= mention_regs (XVECEXP (x, i, j));
1113
1114  return changed;
1115}
1116
1117/* Update the register quantities for inserting X into the hash table
1118   with a value equivalent to CLASSP.
1119   (If the class does not contain a REG, it is irrelevant.)
1120   If MODIFIED is nonzero, X is a destination; it is being modified.
1121   Note that delete_reg_equiv should be called on a register
1122   before insert_regs is done on that register with MODIFIED != 0.
1123
1124   Nonzero value means that elements of reg_qty have changed
1125   so X's hash code may be different.  */
1126
1127static int
1128insert_regs (rtx x, struct table_elt *classp, int modified)
1129{
1130  if (REG_P (x))
1131    {
1132      unsigned int regno = REGNO (x);
1133      int qty_valid;
1134
1135      /* If REGNO is in the equivalence table already but is of the
1136	 wrong mode for that equivalence, don't do anything here.  */
1137
1138      qty_valid = REGNO_QTY_VALID_P (regno);
1139      if (qty_valid)
1140	{
1141	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1142
1143	  if (ent->mode != GET_MODE (x))
1144	    return 0;
1145	}
1146
1147      if (modified || ! qty_valid)
1148	{
1149	  if (classp)
1150	    for (classp = classp->first_same_value;
1151		 classp != 0;
1152		 classp = classp->next_same_value)
1153	      if (REG_P (classp->exp)
1154		  && GET_MODE (classp->exp) == GET_MODE (x))
1155		{
1156		  unsigned c_regno = REGNO (classp->exp);
1157
1158		  gcc_assert (REGNO_QTY_VALID_P (c_regno));
1159
1160		  /* Suppose that 5 is hard reg and 100 and 101 are
1161		     pseudos.  Consider
1162
1163		     (set (reg:si 100) (reg:si 5))
1164		     (set (reg:si 5) (reg:si 100))
1165		     (set (reg:di 101) (reg:di 5))
1166
1167		     We would now set REG_QTY (101) = REG_QTY (5), but the
1168		     entry for 5 is in SImode.  When we use this later in
1169		     copy propagation, we get the register in wrong mode.  */
1170		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1171		    continue;
1172
1173		  make_regs_eqv (regno, c_regno);
1174		  return 1;
1175		}
1176
1177	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1178	     than REG_IN_TABLE to find out if there was only a single preceding
1179	     invalidation - for the SUBREG - or another one, which would be
1180	     for the full register.  However, if we find here that REG_TICK
1181	     indicates that the register is invalid, it means that it has
1182	     been invalidated in a separate operation.  The SUBREG might be used
1183	     now (then this is a recursive call), or we might use the full REG
1184	     now and a SUBREG of it later.  So bump up REG_TICK so that
1185	     mention_regs will do the right thing.  */
1186	  if (! modified
1187	      && REG_IN_TABLE (regno) >= 0
1188	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1189	    REG_TICK (regno)++;
1190	  make_new_qty (regno, GET_MODE (x));
1191	  return 1;
1192	}
1193
1194      return 0;
1195    }
1196
1197  /* If X is a SUBREG, we will likely be inserting the inner register in the
1198     table.  If that register doesn't have an assigned quantity number at
1199     this point but does later, the insertion that we will be doing now will
1200     not be accessible because its hash code will have changed.  So assign
1201     a quantity number now.  */
1202
1203  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1204	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1205    {
1206      insert_regs (SUBREG_REG (x), NULL, 0);
1207      mention_regs (x);
1208      return 1;
1209    }
1210  else
1211    return mention_regs (x);
1212}
1213
1214
1215/* Compute upper and lower anchors for CST.  Also compute the offset of CST
1216   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
1217   CST is equal to an anchor.  */
1218
1219static bool
1220compute_const_anchors (rtx cst,
1221		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
1222		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
1223{
1224  HOST_WIDE_INT n = INTVAL (cst);
1225
1226  *lower_base = n & ~(targetm.const_anchor - 1);
1227  if (*lower_base == n)
1228    return false;
1229
1230  *upper_base =
1231    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
1232  *upper_offs = n - *upper_base;
1233  *lower_offs = n - *lower_base;
1234  return true;
1235}
1236
1237/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */
1238
1239static void
1240insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
1241		     machine_mode mode)
1242{
1243  struct table_elt *elt;
1244  unsigned hash;
1245  rtx anchor_exp;
1246  rtx exp;
1247
1248  anchor_exp = GEN_INT (anchor);
1249  hash = HASH (anchor_exp, mode);
1250  elt = lookup (anchor_exp, hash, mode);
1251  if (!elt)
1252    elt = insert (anchor_exp, NULL, hash, mode);
1253
1254  exp = plus_constant (mode, reg, offs);
1255  /* REG has just been inserted and the hash codes recomputed.  */
1256  mention_regs (exp);
1257  hash = HASH (exp, mode);
1258
1259  /* Use the cost of the register rather than the whole expression.  When
1260     looking up constant anchors we will further offset the corresponding
1261     expression therefore it does not make sense to prefer REGs over
1262     reg-immediate additions.  Prefer instead the oldest expression.  Also
1263     don't prefer pseudos over hard regs so that we derive constants in
1264     argument registers from other argument registers rather than from the
1265     original pseudo that was used to synthesize the constant.  */
1266  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
1267}
1268
1269/* The constant CST is equivalent to the register REG.  Create
1270   equivalences between the two anchors of CST and the corresponding
1271   register-offset expressions using REG.  */
1272
1273static void
1274insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
1275{
1276  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1277
1278  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1279			      &upper_base, &upper_offs))
1280      return;
1281
1282  /* Ignore anchors of value 0.  Constants accessible from zero are
1283     simple.  */
1284  if (lower_base != 0)
1285    insert_const_anchor (lower_base, reg, -lower_offs, mode);
1286
1287  if (upper_base != 0)
1288    insert_const_anchor (upper_base, reg, -upper_offs, mode);
1289}
1290
1291/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
1292   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1293   valid expression.  Return the cheapest and oldest of such expressions.  In
1294   *OLD, return how old the resulting expression is compared to the other
1295   equivalent expressions.  */
1296
1297static rtx
1298find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
1299			   unsigned *old)
1300{
1301  struct table_elt *elt;
1302  unsigned idx;
1303  struct table_elt *match_elt;
1304  rtx match;
1305
1306  /* Find the cheapest and *oldest* expression to maximize the chance of
1307     reusing the same pseudo.  */
1308
1309  match_elt = NULL;
1310  match = NULL_RTX;
1311  for (elt = anchor_elt->first_same_value, idx = 0;
1312       elt;
1313       elt = elt->next_same_value, idx++)
1314    {
1315      if (match_elt && CHEAPER (match_elt, elt))
1316	return match;
1317
1318      if (REG_P (elt->exp)
1319	  || (GET_CODE (elt->exp) == PLUS
1320	      && REG_P (XEXP (elt->exp, 0))
1321	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
1322	{
1323	  rtx x;
1324
1325	  /* Ignore expressions that are no longer valid.  */
1326	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
1327	    continue;
1328
1329	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
1330	  if (REG_P (x)
1331	      || (GET_CODE (x) == PLUS
1332		  && IN_RANGE (INTVAL (XEXP (x, 1)),
1333			       -targetm.const_anchor,
1334			       targetm.const_anchor - 1)))
1335	    {
1336	      match = x;
1337	      match_elt = elt;
1338	      *old = idx;
1339	    }
1340	}
1341    }
1342
1343  return match;
1344}
1345
1346/* Try to express the constant SRC_CONST using a register+offset expression
1347   derived from a constant anchor.  Return it if successful or NULL_RTX,
1348   otherwise.  */
1349
1350static rtx
1351try_const_anchors (rtx src_const, machine_mode mode)
1352{
1353  struct table_elt *lower_elt, *upper_elt;
1354  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1355  rtx lower_anchor_rtx, upper_anchor_rtx;
1356  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1357  unsigned lower_old, upper_old;
1358
1359  /* CONST_INT is used for CC modes, but we should leave those alone.  */
1360  if (GET_MODE_CLASS (mode) == MODE_CC)
1361    return NULL_RTX;
1362
1363  gcc_assert (SCALAR_INT_MODE_P (mode));
1364  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1365			      &upper_base, &upper_offs))
1366    return NULL_RTX;
1367
1368  lower_anchor_rtx = GEN_INT (lower_base);
1369  upper_anchor_rtx = GEN_INT (upper_base);
1370  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1371  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1372
1373  if (lower_elt)
1374    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1375  if (upper_elt)
1376    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1377
1378  if (!lower_exp)
1379    return upper_exp;
1380  if (!upper_exp)
1381    return lower_exp;
1382
1383  /* Return the older expression.  */
1384  return (upper_old > lower_old ? upper_exp : lower_exp);
1385}
1386
1387/* Look in or update the hash table.  */
1388
1389/* Remove table element ELT from use in the table.
1390   HASH is its hash code, made using the HASH macro.
1391   It's an argument because often that is known in advance
1392   and we save much time not recomputing it.  */
1393
1394static void
1395remove_from_table (struct table_elt *elt, unsigned int hash)
1396{
1397  if (elt == 0)
1398    return;
1399
1400  /* Mark this element as removed.  See cse_insn.  */
1401  elt->first_same_value = 0;
1402
1403  /* Remove the table element from its equivalence class.  */
1404
1405  {
1406    struct table_elt *prev = elt->prev_same_value;
1407    struct table_elt *next = elt->next_same_value;
1408
1409    if (next)
1410      next->prev_same_value = prev;
1411
1412    if (prev)
1413      prev->next_same_value = next;
1414    else
1415      {
1416	struct table_elt *newfirst = next;
1417	while (next)
1418	  {
1419	    next->first_same_value = newfirst;
1420	    next = next->next_same_value;
1421	  }
1422      }
1423  }
1424
1425  /* Remove the table element from its hash bucket.  */
1426
1427  {
1428    struct table_elt *prev = elt->prev_same_hash;
1429    struct table_elt *next = elt->next_same_hash;
1430
1431    if (next)
1432      next->prev_same_hash = prev;
1433
1434    if (prev)
1435      prev->next_same_hash = next;
1436    else if (table[hash] == elt)
1437      table[hash] = next;
1438    else
1439      {
1440	/* This entry is not in the proper hash bucket.  This can happen
1441	   when two classes were merged by `merge_equiv_classes'.  Search
1442	   for the hash bucket that it heads.  This happens only very
1443	   rarely, so the cost is acceptable.  */
1444	for (hash = 0; hash < HASH_SIZE; hash++)
1445	  if (table[hash] == elt)
1446	    table[hash] = next;
1447      }
1448  }
1449
1450  /* Remove the table element from its related-value circular chain.  */
1451
1452  if (elt->related_value != 0 && elt->related_value != elt)
1453    {
1454      struct table_elt *p = elt->related_value;
1455
1456      while (p->related_value != elt)
1457	p = p->related_value;
1458      p->related_value = elt->related_value;
1459      if (p->related_value == p)
1460	p->related_value = 0;
1461    }
1462
1463  /* Now add it to the free element chain.  */
1464  elt->next_same_hash = free_element_chain;
1465  free_element_chain = elt;
1466}
1467
1468/* Same as above, but X is a pseudo-register.  */
1469
1470static void
1471remove_pseudo_from_table (rtx x, unsigned int hash)
1472{
1473  struct table_elt *elt;
1474
1475  /* Because a pseudo-register can be referenced in more than one
1476     mode, we might have to remove more than one table entry.  */
1477  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1478    remove_from_table (elt, hash);
1479}
1480
1481/* Look up X in the hash table and return its table element,
1482   or 0 if X is not in the table.
1483
1484   MODE is the machine-mode of X, or if X is an integer constant
1485   with VOIDmode then MODE is the mode with which X will be used.
1486
1487   Here we are satisfied to find an expression whose tree structure
1488   looks like X.  */
1489
1490static struct table_elt *
1491lookup (rtx x, unsigned int hash, machine_mode mode)
1492{
1493  struct table_elt *p;
1494
1495  for (p = table[hash]; p; p = p->next_same_hash)
1496    if (mode == p->mode && ((x == p->exp && REG_P (x))
1497			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1498      return p;
1499
1500  return 0;
1501}
1502
1503/* Like `lookup' but don't care whether the table element uses invalid regs.
1504   Also ignore discrepancies in the machine mode of a register.  */
1505
1506static struct table_elt *
1507lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
1508{
1509  struct table_elt *p;
1510
1511  if (REG_P (x))
1512    {
1513      unsigned int regno = REGNO (x);
1514
1515      /* Don't check the machine mode when comparing registers;
1516	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1517      for (p = table[hash]; p; p = p->next_same_hash)
1518	if (REG_P (p->exp)
1519	    && REGNO (p->exp) == regno)
1520	  return p;
1521    }
1522  else
1523    {
1524      for (p = table[hash]; p; p = p->next_same_hash)
1525	if (mode == p->mode
1526	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1527	  return p;
1528    }
1529
1530  return 0;
1531}
1532
1533/* Look for an expression equivalent to X and with code CODE.
1534   If one is found, return that expression.  */
1535
1536static rtx
1537lookup_as_function (rtx x, enum rtx_code code)
1538{
1539  struct table_elt *p
1540    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1541
1542  if (p == 0)
1543    return 0;
1544
1545  for (p = p->first_same_value; p; p = p->next_same_value)
1546    if (GET_CODE (p->exp) == code
1547	/* Make sure this is a valid entry in the table.  */
1548	&& exp_equiv_p (p->exp, p->exp, 1, false))
1549      return p->exp;
1550
1551  return 0;
1552}
1553
1554/* Insert X in the hash table, assuming HASH is its hash code and
1555   CLASSP is an element of the class it should go in (or 0 if a new
1556   class should be made).  COST is the code of X and reg_cost is the
1557   cost of registers in X.  It is inserted at the proper position to
1558   keep the class in the order cheapest first.
1559
1560   MODE is the machine-mode of X, or if X is an integer constant
1561   with VOIDmode then MODE is the mode with which X will be used.
1562
1563   For elements of equal cheapness, the most recent one
1564   goes in front, except that the first element in the list
1565   remains first unless a cheaper element is added.  The order of
1566   pseudo-registers does not matter, as canon_reg will be called to
1567   find the cheapest when a register is retrieved from the table.
1568
1569   The in_memory field in the hash table element is set to 0.
1570   The caller must set it nonzero if appropriate.
1571
1572   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1573   and if insert_regs returns a nonzero value
1574   you must then recompute its hash code before calling here.
1575
1576   If necessary, update table showing constant values of quantities.  */
1577
1578static struct table_elt *
1579insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1580		   machine_mode mode, int cost, int reg_cost)
1581{
1582  struct table_elt *elt;
1583
1584  /* If X is a register and we haven't made a quantity for it,
1585     something is wrong.  */
1586  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1587
1588  /* If X is a hard register, show it is being put in the table.  */
1589  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1590    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1591
1592  /* Put an element for X into the right hash bucket.  */
1593
1594  elt = free_element_chain;
1595  if (elt)
1596    free_element_chain = elt->next_same_hash;
1597  else
1598    elt = XNEW (struct table_elt);
1599
1600  elt->exp = x;
1601  elt->canon_exp = NULL_RTX;
1602  elt->cost = cost;
1603  elt->regcost = reg_cost;
1604  elt->next_same_value = 0;
1605  elt->prev_same_value = 0;
1606  elt->next_same_hash = table[hash];
1607  elt->prev_same_hash = 0;
1608  elt->related_value = 0;
1609  elt->in_memory = 0;
1610  elt->mode = mode;
1611  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1612
1613  if (table[hash])
1614    table[hash]->prev_same_hash = elt;
1615  table[hash] = elt;
1616
1617  /* Put it into the proper value-class.  */
1618  if (classp)
1619    {
1620      classp = classp->first_same_value;
1621      if (CHEAPER (elt, classp))
1622	/* Insert at the head of the class.  */
1623	{
1624	  struct table_elt *p;
1625	  elt->next_same_value = classp;
1626	  classp->prev_same_value = elt;
1627	  elt->first_same_value = elt;
1628
1629	  for (p = classp; p; p = p->next_same_value)
1630	    p->first_same_value = elt;
1631	}
1632      else
1633	{
1634	  /* Insert not at head of the class.  */
1635	  /* Put it after the last element cheaper than X.  */
1636	  struct table_elt *p, *next;
1637
1638	  for (p = classp;
1639	       (next = p->next_same_value) && CHEAPER (next, elt);
1640	       p = next)
1641	    ;
1642
1643	  /* Put it after P and before NEXT.  */
1644	  elt->next_same_value = next;
1645	  if (next)
1646	    next->prev_same_value = elt;
1647
1648	  elt->prev_same_value = p;
1649	  p->next_same_value = elt;
1650	  elt->first_same_value = classp;
1651	}
1652    }
1653  else
1654    elt->first_same_value = elt;
1655
1656  /* If this is a constant being set equivalent to a register or a register
1657     being set equivalent to a constant, note the constant equivalence.
1658
1659     If this is a constant, it cannot be equivalent to a different constant,
1660     and a constant is the only thing that can be cheaper than a register.  So
1661     we know the register is the head of the class (before the constant was
1662     inserted).
1663
1664     If this is a register that is not already known equivalent to a
1665     constant, we must check the entire class.
1666
1667     If this is a register that is already known equivalent to an insn,
1668     update the qtys `const_insn' to show that `this_insn' is the latest
1669     insn making that quantity equivalent to the constant.  */
1670
1671  if (elt->is_const && classp && REG_P (classp->exp)
1672      && !REG_P (x))
1673    {
1674      int exp_q = REG_QTY (REGNO (classp->exp));
1675      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1676
1677      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1678      exp_ent->const_insn = this_insn;
1679    }
1680
1681  else if (REG_P (x)
1682	   && classp
1683	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1684	   && ! elt->is_const)
1685    {
1686      struct table_elt *p;
1687
1688      for (p = classp; p != 0; p = p->next_same_value)
1689	{
1690	  if (p->is_const && !REG_P (p->exp))
1691	    {
1692	      int x_q = REG_QTY (REGNO (x));
1693	      struct qty_table_elem *x_ent = &qty_table[x_q];
1694
1695	      x_ent->const_rtx
1696		= gen_lowpart (GET_MODE (x), p->exp);
1697	      x_ent->const_insn = this_insn;
1698	      break;
1699	    }
1700	}
1701    }
1702
1703  else if (REG_P (x)
1704	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1705	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1706    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1707
1708  /* If this is a constant with symbolic value,
1709     and it has a term with an explicit integer value,
1710     link it up with related expressions.  */
1711  if (GET_CODE (x) == CONST)
1712    {
1713      rtx subexp = get_related_value (x);
1714      unsigned subhash;
1715      struct table_elt *subelt, *subelt_prev;
1716
1717      if (subexp != 0)
1718	{
1719	  /* Get the integer-free subexpression in the hash table.  */
1720	  subhash = SAFE_HASH (subexp, mode);
1721	  subelt = lookup (subexp, subhash, mode);
1722	  if (subelt == 0)
1723	    subelt = insert (subexp, NULL, subhash, mode);
1724	  /* Initialize SUBELT's circular chain if it has none.  */
1725	  if (subelt->related_value == 0)
1726	    subelt->related_value = subelt;
1727	  /* Find the element in the circular chain that precedes SUBELT.  */
1728	  subelt_prev = subelt;
1729	  while (subelt_prev->related_value != subelt)
1730	    subelt_prev = subelt_prev->related_value;
1731	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1732	     This way the element that follows SUBELT is the oldest one.  */
1733	  elt->related_value = subelt_prev->related_value;
1734	  subelt_prev->related_value = elt;
1735	}
1736    }
1737
1738  return elt;
1739}
1740
1741/* Wrap insert_with_costs by passing the default costs.  */
1742
1743static struct table_elt *
1744insert (rtx x, struct table_elt *classp, unsigned int hash,
1745	machine_mode mode)
1746{
1747  return
1748    insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1749}
1750
1751
1752/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1753   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1754   the two classes equivalent.
1755
1756   CLASS1 will be the surviving class; CLASS2 should not be used after this
1757   call.
1758
1759   Any invalid entries in CLASS2 will not be copied.  */
1760
1761static void
1762merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1763{
1764  struct table_elt *elt, *next, *new_elt;
1765
1766  /* Ensure we start with the head of the classes.  */
1767  class1 = class1->first_same_value;
1768  class2 = class2->first_same_value;
1769
1770  /* If they were already equal, forget it.  */
1771  if (class1 == class2)
1772    return;
1773
1774  for (elt = class2; elt; elt = next)
1775    {
1776      unsigned int hash;
1777      rtx exp = elt->exp;
1778      machine_mode mode = elt->mode;
1779
1780      next = elt->next_same_value;
1781
1782      /* Remove old entry, make a new one in CLASS1's class.
1783	 Don't do this for invalid entries as we cannot find their
1784	 hash code (it also isn't necessary).  */
1785      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1786	{
1787	  bool need_rehash = false;
1788
1789	  hash_arg_in_memory = 0;
1790	  hash = HASH (exp, mode);
1791
1792	  if (REG_P (exp))
1793	    {
1794	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1795	      delete_reg_equiv (REGNO (exp));
1796	    }
1797
1798	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1799	    remove_pseudo_from_table (exp, hash);
1800	  else
1801	    remove_from_table (elt, hash);
1802
1803	  if (insert_regs (exp, class1, 0) || need_rehash)
1804	    {
1805	      rehash_using_reg (exp);
1806	      hash = HASH (exp, mode);
1807	    }
1808	  new_elt = insert (exp, class1, hash, mode);
1809	  new_elt->in_memory = hash_arg_in_memory;
1810	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
1811	    new_elt->cost = MAX_COST;
1812	}
1813    }
1814}
1815
1816/* Flush the entire hash table.  */
1817
1818static void
1819flush_hash_table (void)
1820{
1821  int i;
1822  struct table_elt *p;
1823
1824  for (i = 0; i < HASH_SIZE; i++)
1825    for (p = table[i]; p; p = table[i])
1826      {
1827	/* Note that invalidate can remove elements
1828	   after P in the current hash chain.  */
1829	if (REG_P (p->exp))
1830	  invalidate (p->exp, VOIDmode);
1831	else
1832	  remove_from_table (p, i);
1833      }
1834}
1835
1836/* Check whether an anti dependence exists between X and EXP.  MODE and
1837   ADDR are as for canon_anti_dependence.  */
1838
1839static bool
1840check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
1841{
1842  subrtx_iterator::array_type array;
1843  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1844    {
1845      const_rtx x = *iter;
1846      if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
1847	return true;
1848    }
1849  return false;
1850}
1851
1852/* Remove from the hash table, or mark as invalid, all expressions whose
1853   values could be altered by storing in X.  X is a register, a subreg, or
1854   a memory reference with nonvarying address (because, when a memory
1855   reference with a varying address is stored in, all memory references are
1856   removed by invalidate_memory so specific invalidation is superfluous).
1857   FULL_MODE, if not VOIDmode, indicates that this much should be
1858   invalidated instead of just the amount indicated by the mode of X.  This
1859   is only used for bitfield stores into memory.
1860
1861   A nonvarying address may be just a register or just a symbol reference,
1862   or it may be either of those plus a numeric offset.  */
1863
1864static void
1865invalidate (rtx x, machine_mode full_mode)
1866{
1867  int i;
1868  struct table_elt *p;
1869  rtx addr;
1870
1871  switch (GET_CODE (x))
1872    {
1873    case REG:
1874      {
1875	/* If X is a register, dependencies on its contents are recorded
1876	   through the qty number mechanism.  Just change the qty number of
1877	   the register, mark it as invalid for expressions that refer to it,
1878	   and remove it itself.  */
1879	unsigned int regno = REGNO (x);
1880	unsigned int hash = HASH (x, GET_MODE (x));
1881
1882	/* Remove REGNO from any quantity list it might be on and indicate
1883	   that its value might have changed.  If it is a pseudo, remove its
1884	   entry from the hash table.
1885
1886	   For a hard register, we do the first two actions above for any
1887	   additional hard registers corresponding to X.  Then, if any of these
1888	   registers are in the table, we must remove any REG entries that
1889	   overlap these registers.  */
1890
1891	delete_reg_equiv (regno);
1892	REG_TICK (regno)++;
1893	SUBREG_TICKED (regno) = -1;
1894
1895	if (regno >= FIRST_PSEUDO_REGISTER)
1896	  remove_pseudo_from_table (x, hash);
1897	else
1898	  {
1899	    HOST_WIDE_INT in_table
1900	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1901	    unsigned int endregno = END_HARD_REGNO (x);
1902	    unsigned int tregno, tendregno, rn;
1903	    struct table_elt *p, *next;
1904
1905	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1906
1907	    for (rn = regno + 1; rn < endregno; rn++)
1908	      {
1909		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1910		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1911		delete_reg_equiv (rn);
1912		REG_TICK (rn)++;
1913		SUBREG_TICKED (rn) = -1;
1914	      }
1915
1916	    if (in_table)
1917	      for (hash = 0; hash < HASH_SIZE; hash++)
1918		for (p = table[hash]; p; p = next)
1919		  {
1920		    next = p->next_same_hash;
1921
1922		    if (!REG_P (p->exp)
1923			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1924		      continue;
1925
1926		    tregno = REGNO (p->exp);
1927		    tendregno = END_HARD_REGNO (p->exp);
1928		    if (tendregno > regno && tregno < endregno)
1929		      remove_from_table (p, hash);
1930		  }
1931	  }
1932      }
1933      return;
1934
1935    case SUBREG:
1936      invalidate (SUBREG_REG (x), VOIDmode);
1937      return;
1938
1939    case PARALLEL:
1940      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1941	invalidate (XVECEXP (x, 0, i), VOIDmode);
1942      return;
1943
1944    case EXPR_LIST:
1945      /* This is part of a disjoint return value; extract the location in
1946	 question ignoring the offset.  */
1947      invalidate (XEXP (x, 0), VOIDmode);
1948      return;
1949
1950    case MEM:
1951      addr = canon_rtx (get_addr (XEXP (x, 0)));
1952      /* Calculate the canonical version of X here so that
1953	 true_dependence doesn't generate new RTL for X on each call.  */
1954      x = canon_rtx (x);
1955
1956      /* Remove all hash table elements that refer to overlapping pieces of
1957	 memory.  */
1958      if (full_mode == VOIDmode)
1959	full_mode = GET_MODE (x);
1960
1961      for (i = 0; i < HASH_SIZE; i++)
1962	{
1963	  struct table_elt *next;
1964
1965	  for (p = table[i]; p; p = next)
1966	    {
1967	      next = p->next_same_hash;
1968	      if (p->in_memory)
1969		{
1970		  /* Just canonicalize the expression once;
1971		     otherwise each time we call invalidate
1972		     true_dependence will canonicalize the
1973		     expression again.  */
1974		  if (!p->canon_exp)
1975		    p->canon_exp = canon_rtx (p->exp);
1976		  if (check_dependence (p->canon_exp, x, full_mode, addr))
1977		    remove_from_table (p, i);
1978		}
1979	    }
1980	}
1981      return;
1982
1983    default:
1984      gcc_unreachable ();
1985    }
1986}
1987
1988/* Invalidate DEST.  Used when DEST is not going to be added
1989   into the hash table for some reason, e.g. do_not_record
1990   flagged on it.  */
1991
1992static void
1993invalidate_dest (rtx dest)
1994{
1995  if (REG_P (dest)
1996      || GET_CODE (dest) == SUBREG
1997      || MEM_P (dest))
1998    invalidate (dest, VOIDmode);
1999  else if (GET_CODE (dest) == STRICT_LOW_PART
2000	   || GET_CODE (dest) == ZERO_EXTRACT)
2001    invalidate (XEXP (dest, 0), GET_MODE (dest));
2002}
2003
2004/* Remove all expressions that refer to register REGNO,
2005   since they are already invalid, and we are about to
2006   mark that register valid again and don't want the old
2007   expressions to reappear as valid.  */
2008
2009static void
2010remove_invalid_refs (unsigned int regno)
2011{
2012  unsigned int i;
2013  struct table_elt *p, *next;
2014
2015  for (i = 0; i < HASH_SIZE; i++)
2016    for (p = table[i]; p; p = next)
2017      {
2018	next = p->next_same_hash;
2019	if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
2020	  remove_from_table (p, i);
2021      }
2022}
2023
2024/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2025   and mode MODE.  */
2026static void
2027remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
2028			    machine_mode mode)
2029{
2030  unsigned int i;
2031  struct table_elt *p, *next;
2032  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2033
2034  for (i = 0; i < HASH_SIZE; i++)
2035    for (p = table[i]; p; p = next)
2036      {
2037	rtx exp = p->exp;
2038	next = p->next_same_hash;
2039
2040	if (!REG_P (exp)
2041	    && (GET_CODE (exp) != SUBREG
2042		|| !REG_P (SUBREG_REG (exp))
2043		|| REGNO (SUBREG_REG (exp)) != regno
2044		|| (((SUBREG_BYTE (exp)
2045		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2046		    && SUBREG_BYTE (exp) <= end))
2047	    && refers_to_regno_p (regno, p->exp))
2048	  remove_from_table (p, i);
2049      }
2050}
2051
2052/* Recompute the hash codes of any valid entries in the hash table that
2053   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2054
2055   This is called when we make a jump equivalence.  */
2056
2057static void
2058rehash_using_reg (rtx x)
2059{
2060  unsigned int i;
2061  struct table_elt *p, *next;
2062  unsigned hash;
2063
2064  if (GET_CODE (x) == SUBREG)
2065    x = SUBREG_REG (x);
2066
2067  /* If X is not a register or if the register is known not to be in any
2068     valid entries in the table, we have no work to do.  */
2069
2070  if (!REG_P (x)
2071      || REG_IN_TABLE (REGNO (x)) < 0
2072      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2073    return;
2074
2075  /* Scan all hash chains looking for valid entries that mention X.
2076     If we find one and it is in the wrong hash chain, move it.  */
2077
2078  for (i = 0; i < HASH_SIZE; i++)
2079    for (p = table[i]; p; p = next)
2080      {
2081	next = p->next_same_hash;
2082	if (reg_mentioned_p (x, p->exp)
2083	    && exp_equiv_p (p->exp, p->exp, 1, false)
2084	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2085	  {
2086	    if (p->next_same_hash)
2087	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2088
2089	    if (p->prev_same_hash)
2090	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2091	    else
2092	      table[i] = p->next_same_hash;
2093
2094	    p->next_same_hash = table[hash];
2095	    p->prev_same_hash = 0;
2096	    if (table[hash])
2097	      table[hash]->prev_same_hash = p;
2098	    table[hash] = p;
2099	  }
2100      }
2101}
2102
2103/* Remove from the hash table any expression that is a call-clobbered
2104   register.  Also update their TICK values.  */
2105
2106static void
2107invalidate_for_call (void)
2108{
2109  unsigned int regno, endregno;
2110  unsigned int i;
2111  unsigned hash;
2112  struct table_elt *p, *next;
2113  int in_table = 0;
2114  hard_reg_set_iterator hrsi;
2115
2116  /* Go through all the hard registers.  For each that is clobbered in
2117     a CALL_INSN, remove the register from quantity chains and update
2118     reg_tick if defined.  Also see if any of these registers is currently
2119     in the table.  */
2120  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
2121    {
2122      delete_reg_equiv (regno);
2123      if (REG_TICK (regno) >= 0)
2124	{
2125	  REG_TICK (regno)++;
2126	  SUBREG_TICKED (regno) = -1;
2127	}
2128      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2129    }
2130
2131  /* In the case where we have no call-clobbered hard registers in the
2132     table, we are done.  Otherwise, scan the table and remove any
2133     entry that overlaps a call-clobbered register.  */
2134
2135  if (in_table)
2136    for (hash = 0; hash < HASH_SIZE; hash++)
2137      for (p = table[hash]; p; p = next)
2138	{
2139	  next = p->next_same_hash;
2140
2141	  if (!REG_P (p->exp)
2142	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2143	    continue;
2144
2145	  regno = REGNO (p->exp);
2146	  endregno = END_HARD_REGNO (p->exp);
2147
2148	  for (i = regno; i < endregno; i++)
2149	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2150	      {
2151		remove_from_table (p, hash);
2152		break;
2153	      }
2154	}
2155}
2156
2157/* Given an expression X of type CONST,
2158   and ELT which is its table entry (or 0 if it
2159   is not in the hash table),
2160   return an alternate expression for X as a register plus integer.
2161   If none can be found, return 0.  */
2162
2163static rtx
2164use_related_value (rtx x, struct table_elt *elt)
2165{
2166  struct table_elt *relt = 0;
2167  struct table_elt *p, *q;
2168  HOST_WIDE_INT offset;
2169
2170  /* First, is there anything related known?
2171     If we have a table element, we can tell from that.
2172     Otherwise, must look it up.  */
2173
2174  if (elt != 0 && elt->related_value != 0)
2175    relt = elt;
2176  else if (elt == 0 && GET_CODE (x) == CONST)
2177    {
2178      rtx subexp = get_related_value (x);
2179      if (subexp != 0)
2180	relt = lookup (subexp,
2181		       SAFE_HASH (subexp, GET_MODE (subexp)),
2182		       GET_MODE (subexp));
2183    }
2184
2185  if (relt == 0)
2186    return 0;
2187
2188  /* Search all related table entries for one that has an
2189     equivalent register.  */
2190
2191  p = relt;
2192  while (1)
2193    {
2194      /* This loop is strange in that it is executed in two different cases.
2195	 The first is when X is already in the table.  Then it is searching
2196	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2197	 X is not in the table.  Then RELT points to a class for the related
2198	 value.
2199
2200	 Ensure that, whatever case we are in, that we ignore classes that have
2201	 the same value as X.  */
2202
2203      if (rtx_equal_p (x, p->exp))
2204	q = 0;
2205      else
2206	for (q = p->first_same_value; q; q = q->next_same_value)
2207	  if (REG_P (q->exp))
2208	    break;
2209
2210      if (q)
2211	break;
2212
2213      p = p->related_value;
2214
2215      /* We went all the way around, so there is nothing to be found.
2216	 Alternatively, perhaps RELT was in the table for some other reason
2217	 and it has no related values recorded.  */
2218      if (p == relt || p == 0)
2219	break;
2220    }
2221
2222  if (q == 0)
2223    return 0;
2224
2225  offset = (get_integer_term (x) - get_integer_term (p->exp));
2226  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
2227  return plus_constant (q->mode, q->exp, offset);
2228}
2229
2230
2231/* Hash a string.  Just add its bytes up.  */
2232static inline unsigned
2233hash_rtx_string (const char *ps)
2234{
2235  unsigned hash = 0;
2236  const unsigned char *p = (const unsigned char *) ps;
2237
2238  if (p)
2239    while (*p)
2240      hash += *p++;
2241
2242  return hash;
2243}
2244
2245/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2246   When the callback returns true, we continue with the new rtx.  */
2247
2248unsigned
2249hash_rtx_cb (const_rtx x, machine_mode mode,
2250             int *do_not_record_p, int *hash_arg_in_memory_p,
2251             bool have_reg_qty, hash_rtx_callback_function cb)
2252{
2253  int i, j;
2254  unsigned hash = 0;
2255  enum rtx_code code;
2256  const char *fmt;
2257  machine_mode newmode;
2258  rtx newx;
2259
2260  /* Used to turn recursion into iteration.  We can't rely on GCC's
2261     tail-recursion elimination since we need to keep accumulating values
2262     in HASH.  */
2263 repeat:
2264  if (x == 0)
2265    return hash;
2266
2267  /* Invoke the callback first.  */
2268  if (cb != NULL
2269      && ((*cb) (x, mode, &newx, &newmode)))
2270    {
2271      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2272                           hash_arg_in_memory_p, have_reg_qty, cb);
2273      return hash;
2274    }
2275
2276  code = GET_CODE (x);
2277  switch (code)
2278    {
2279    case REG:
2280      {
2281	unsigned int regno = REGNO (x);
2282
2283	if (do_not_record_p && !reload_completed)
2284	  {
2285	    /* On some machines, we can't record any non-fixed hard register,
2286	       because extending its life will cause reload problems.  We
2287	       consider ap, fp, sp, gp to be fixed for this purpose.
2288
2289	       We also consider CCmode registers to be fixed for this purpose;
2290	       failure to do so leads to failure to simplify 0<100 type of
2291	       conditionals.
2292
2293	       On all machines, we can't record any global registers.
2294	       Nor should we record any register that is in a small
2295	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
2296	    bool record;
2297
2298	    if (regno >= FIRST_PSEUDO_REGISTER)
2299	      record = true;
2300	    else if (x == frame_pointer_rtx
2301		     || x == hard_frame_pointer_rtx
2302		     || x == arg_pointer_rtx
2303		     || x == stack_pointer_rtx
2304		     || x == pic_offset_table_rtx)
2305	      record = true;
2306	    else if (global_regs[regno])
2307	      record = false;
2308	    else if (fixed_regs[regno])
2309	      record = true;
2310	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2311	      record = true;
2312	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2313	      record = false;
2314	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2315	      record = false;
2316	    else
2317	      record = true;
2318
2319	    if (!record)
2320	      {
2321		*do_not_record_p = 1;
2322		return 0;
2323	      }
2324	  }
2325
2326	hash += ((unsigned int) REG << 7);
2327        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2328	return hash;
2329      }
2330
2331    /* We handle SUBREG of a REG specially because the underlying
2332       reg changes its hash value with every value change; we don't
2333       want to have to forget unrelated subregs when one subreg changes.  */
2334    case SUBREG:
2335      {
2336	if (REG_P (SUBREG_REG (x)))
2337	  {
2338	    hash += (((unsigned int) SUBREG << 7)
2339		     + REGNO (SUBREG_REG (x))
2340		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2341	    return hash;
2342	  }
2343	break;
2344      }
2345
2346    case CONST_INT:
2347      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2348               + (unsigned int) INTVAL (x));
2349      return hash;
2350
2351    case CONST_WIDE_INT:
2352      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
2353	hash += CONST_WIDE_INT_ELT (x, i);
2354      return hash;
2355
2356    case CONST_DOUBLE:
2357      /* This is like the general case, except that it only counts
2358	 the integers representing the constant.  */
2359      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2360      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
2361	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2362		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2363      else
2364	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2365      return hash;
2366
2367    case CONST_FIXED:
2368      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2369      hash += fixed_hash (CONST_FIXED_VALUE (x));
2370      return hash;
2371
2372    case CONST_VECTOR:
2373      {
2374	int units;
2375	rtx elt;
2376
2377	units = CONST_VECTOR_NUNITS (x);
2378
2379	for (i = 0; i < units; ++i)
2380	  {
2381	    elt = CONST_VECTOR_ELT (x, i);
2382	    hash += hash_rtx_cb (elt, GET_MODE (elt),
2383                                 do_not_record_p, hash_arg_in_memory_p,
2384                                 have_reg_qty, cb);
2385	  }
2386
2387	return hash;
2388      }
2389
2390      /* Assume there is only one rtx object for any given label.  */
2391    case LABEL_REF:
2392      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2393	 differences and differences between each stage's debugging dumps.  */
2394	 hash += (((unsigned int) LABEL_REF << 7)
2395		  + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
2396      return hash;
2397
2398    case SYMBOL_REF:
2399      {
2400	/* Don't hash on the symbol's address to avoid bootstrap differences.
2401	   Different hash values may cause expressions to be recorded in
2402	   different orders and thus different registers to be used in the
2403	   final assembler.  This also avoids differences in the dump files
2404	   between various stages.  */
2405	unsigned int h = 0;
2406	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2407
2408	while (*p)
2409	  h += (h << 7) + *p++; /* ??? revisit */
2410
2411	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2412	return hash;
2413      }
2414
2415    case MEM:
2416      /* We don't record if marked volatile or if BLKmode since we don't
2417	 know the size of the move.  */
2418      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2419	{
2420	  *do_not_record_p = 1;
2421	  return 0;
2422	}
2423      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2424	*hash_arg_in_memory_p = 1;
2425
2426      /* Now that we have already found this special case,
2427	 might as well speed it up as much as possible.  */
2428      hash += (unsigned) MEM;
2429      x = XEXP (x, 0);
2430      goto repeat;
2431
2432    case USE:
2433      /* A USE that mentions non-volatile memory needs special
2434	 handling since the MEM may be BLKmode which normally
2435	 prevents an entry from being made.  Pure calls are
2436	 marked by a USE which mentions BLKmode memory.
2437	 See calls.c:emit_call_1.  */
2438      if (MEM_P (XEXP (x, 0))
2439	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2440	{
2441	  hash += (unsigned) USE;
2442	  x = XEXP (x, 0);
2443
2444	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2445	    *hash_arg_in_memory_p = 1;
2446
2447	  /* Now that we have already found this special case,
2448	     might as well speed it up as much as possible.  */
2449	  hash += (unsigned) MEM;
2450	  x = XEXP (x, 0);
2451	  goto repeat;
2452	}
2453      break;
2454
2455    case PRE_DEC:
2456    case PRE_INC:
2457    case POST_DEC:
2458    case POST_INC:
2459    case PRE_MODIFY:
2460    case POST_MODIFY:
2461    case PC:
2462    case CC0:
2463    case CALL:
2464    case UNSPEC_VOLATILE:
2465      if (do_not_record_p) {
2466        *do_not_record_p = 1;
2467        return 0;
2468      }
2469      else
2470        return hash;
2471      break;
2472
2473    case ASM_OPERANDS:
2474      if (do_not_record_p && MEM_VOLATILE_P (x))
2475	{
2476	  *do_not_record_p = 1;
2477	  return 0;
2478	}
2479      else
2480	{
2481	  /* We don't want to take the filename and line into account.  */
2482	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2483	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2484	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2485	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2486
2487	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2488	    {
2489	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2490		{
2491		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2492                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2493                                        do_not_record_p, hash_arg_in_memory_p,
2494                                        have_reg_qty, cb)
2495			   + hash_rtx_string
2496                           (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2497		}
2498
2499	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2500	      x = ASM_OPERANDS_INPUT (x, 0);
2501	      mode = GET_MODE (x);
2502	      goto repeat;
2503	    }
2504
2505	  return hash;
2506	}
2507      break;
2508
2509    default:
2510      break;
2511    }
2512
2513  i = GET_RTX_LENGTH (code) - 1;
2514  hash += (unsigned) code + (unsigned) GET_MODE (x);
2515  fmt = GET_RTX_FORMAT (code);
2516  for (; i >= 0; i--)
2517    {
2518      switch (fmt[i])
2519	{
2520	case 'e':
2521	  /* If we are about to do the last recursive call
2522	     needed at this level, change it into iteration.
2523	     This function  is called enough to be worth it.  */
2524	  if (i == 0)
2525	    {
2526	      x = XEXP (x, i);
2527	      goto repeat;
2528	    }
2529
2530	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2531                               hash_arg_in_memory_p,
2532                               have_reg_qty, cb);
2533	  break;
2534
2535	case 'E':
2536	  for (j = 0; j < XVECLEN (x, i); j++)
2537	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2538                                 hash_arg_in_memory_p,
2539                                 have_reg_qty, cb);
2540	  break;
2541
2542	case 's':
2543	  hash += hash_rtx_string (XSTR (x, i));
2544	  break;
2545
2546	case 'i':
2547	  hash += (unsigned int) XINT (x, i);
2548	  break;
2549
2550	case '0': case 't':
2551	  /* Unused.  */
2552	  break;
2553
2554	default:
2555	  gcc_unreachable ();
2556	}
2557    }
2558
2559  return hash;
2560}
2561
2562/* Hash an rtx.  We are careful to make sure the value is never negative.
2563   Equivalent registers hash identically.
2564   MODE is used in hashing for CONST_INTs only;
2565   otherwise the mode of X is used.
2566
2567   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2568
2569   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2570   a MEM rtx which does not have the MEM_READONLY_P flag set.
2571
2572   Note that cse_insn knows that the hash code of a MEM expression
2573   is just (int) MEM plus the hash code of the address.  */
2574
2575unsigned
2576hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
2577	  int *hash_arg_in_memory_p, bool have_reg_qty)
2578{
2579  return hash_rtx_cb (x, mode, do_not_record_p,
2580                      hash_arg_in_memory_p, have_reg_qty, NULL);
2581}
2582
2583/* Hash an rtx X for cse via hash_rtx.
2584   Stores 1 in do_not_record if any subexpression is volatile.
2585   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2586   does not have the MEM_READONLY_P flag set.  */
2587
2588static inline unsigned
2589canon_hash (rtx x, machine_mode mode)
2590{
2591  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2592}
2593
2594/* Like canon_hash but with no side effects, i.e. do_not_record
2595   and hash_arg_in_memory are not changed.  */
2596
2597static inline unsigned
2598safe_hash (rtx x, machine_mode mode)
2599{
2600  int dummy_do_not_record;
2601  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2602}
2603
2604/* Return 1 iff X and Y would canonicalize into the same thing,
2605   without actually constructing the canonicalization of either one.
2606   If VALIDATE is nonzero,
2607   we assume X is an expression being processed from the rtl
2608   and Y was found in the hash table.  We check register refs
2609   in Y for being marked as valid.
2610
2611   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2612
2613int
2614exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2615{
2616  int i, j;
2617  enum rtx_code code;
2618  const char *fmt;
2619
2620  /* Note: it is incorrect to assume an expression is equivalent to itself
2621     if VALIDATE is nonzero.  */
2622  if (x == y && !validate)
2623    return 1;
2624
2625  if (x == 0 || y == 0)
2626    return x == y;
2627
2628  code = GET_CODE (x);
2629  if (code != GET_CODE (y))
2630    return 0;
2631
2632  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2633  if (GET_MODE (x) != GET_MODE (y))
2634    return 0;
2635
2636  /* MEMs referring to different address space are not equivalent.  */
2637  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2638    return 0;
2639
2640  switch (code)
2641    {
2642    case PC:
2643    case CC0:
2644    CASE_CONST_UNIQUE:
2645      return x == y;
2646
2647    case LABEL_REF:
2648      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2649
2650    case SYMBOL_REF:
2651      return XSTR (x, 0) == XSTR (y, 0);
2652
2653    case REG:
2654      if (for_gcse)
2655	return REGNO (x) == REGNO (y);
2656      else
2657	{
2658	  unsigned int regno = REGNO (y);
2659	  unsigned int i;
2660	  unsigned int endregno = END_REGNO (y);
2661
2662	  /* If the quantities are not the same, the expressions are not
2663	     equivalent.  If there are and we are not to validate, they
2664	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2665
2666	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2667	    return 0;
2668
2669	  if (! validate)
2670	    return 1;
2671
2672	  for (i = regno; i < endregno; i++)
2673	    if (REG_IN_TABLE (i) != REG_TICK (i))
2674	      return 0;
2675
2676	  return 1;
2677	}
2678
2679    case MEM:
2680      if (for_gcse)
2681	{
2682	  /* A volatile mem should not be considered equivalent to any
2683	     other.  */
2684	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2685	    return 0;
2686
2687	  /* Can't merge two expressions in different alias sets, since we
2688	     can decide that the expression is transparent in a block when
2689	     it isn't, due to it being set with the different alias set.
2690
2691	     Also, can't merge two expressions with different MEM_ATTRS.
2692	     They could e.g. be two different entities allocated into the
2693	     same space on the stack (see e.g. PR25130).  In that case, the
2694	     MEM addresses can be the same, even though the two MEMs are
2695	     absolutely not equivalent.
2696
2697	     But because really all MEM attributes should be the same for
2698	     equivalent MEMs, we just use the invariant that MEMs that have
2699	     the same attributes share the same mem_attrs data structure.  */
2700	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
2701	    return 0;
2702
2703	  /* If we are handling exceptions, we cannot consider two expressions
2704	     with different trapping status as equivalent, because simple_mem
2705	     might accept one and reject the other.  */
2706	  if (cfun->can_throw_non_call_exceptions
2707	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
2708	    return 0;
2709	}
2710      break;
2711
2712    /*  For commutative operations, check both orders.  */
2713    case PLUS:
2714    case MULT:
2715    case AND:
2716    case IOR:
2717    case XOR:
2718    case NE:
2719    case EQ:
2720      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2721			     validate, for_gcse)
2722	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2723				validate, for_gcse))
2724	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2725				validate, for_gcse)
2726		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2727				   validate, for_gcse)));
2728
2729    case ASM_OPERANDS:
2730      /* We don't use the generic code below because we want to
2731	 disregard filename and line numbers.  */
2732
2733      /* A volatile asm isn't equivalent to any other.  */
2734      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2735	return 0;
2736
2737      if (GET_MODE (x) != GET_MODE (y)
2738	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2739	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2740		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2741	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2742	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2743	return 0;
2744
2745      if (ASM_OPERANDS_INPUT_LENGTH (x))
2746	{
2747	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2748	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2749			       ASM_OPERANDS_INPUT (y, i),
2750			       validate, for_gcse)
2751		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2752			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2753	      return 0;
2754	}
2755
2756      return 1;
2757
2758    default:
2759      break;
2760    }
2761
2762  /* Compare the elements.  If any pair of corresponding elements
2763     fail to match, return 0 for the whole thing.  */
2764
2765  fmt = GET_RTX_FORMAT (code);
2766  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2767    {
2768      switch (fmt[i])
2769	{
2770	case 'e':
2771	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2772			      validate, for_gcse))
2773	    return 0;
2774	  break;
2775
2776	case 'E':
2777	  if (XVECLEN (x, i) != XVECLEN (y, i))
2778	    return 0;
2779	  for (j = 0; j < XVECLEN (x, i); j++)
2780	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2781				validate, for_gcse))
2782	      return 0;
2783	  break;
2784
2785	case 's':
2786	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2787	    return 0;
2788	  break;
2789
2790	case 'i':
2791	  if (XINT (x, i) != XINT (y, i))
2792	    return 0;
2793	  break;
2794
2795	case 'w':
2796	  if (XWINT (x, i) != XWINT (y, i))
2797	    return 0;
2798	  break;
2799
2800	case '0':
2801	case 't':
2802	  break;
2803
2804	default:
2805	  gcc_unreachable ();
2806	}
2807    }
2808
2809  return 1;
2810}
2811
2812/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2813   the result if necessary.  INSN is as for canon_reg.  */
2814
2815static void
2816validate_canon_reg (rtx *xloc, rtx_insn *insn)
2817{
2818  if (*xloc)
2819    {
2820      rtx new_rtx = canon_reg (*xloc, insn);
2821
2822      /* If replacing pseudo with hard reg or vice versa, ensure the
2823         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2824      gcc_assert (insn && new_rtx);
2825      validate_change (insn, xloc, new_rtx, 1);
2826    }
2827}
2828
2829/* Canonicalize an expression:
2830   replace each register reference inside it
2831   with the "oldest" equivalent register.
2832
2833   If INSN is nonzero validate_change is used to ensure that INSN remains valid
2834   after we make our substitution.  The calls are made with IN_GROUP nonzero
2835   so apply_change_group must be called upon the outermost return from this
2836   function (unless INSN is zero).  The result of apply_change_group can
2837   generally be discarded since the changes we are making are optional.  */
2838
2839static rtx
2840canon_reg (rtx x, rtx_insn *insn)
2841{
2842  int i;
2843  enum rtx_code code;
2844  const char *fmt;
2845
2846  if (x == 0)
2847    return x;
2848
2849  code = GET_CODE (x);
2850  switch (code)
2851    {
2852    case PC:
2853    case CC0:
2854    case CONST:
2855    CASE_CONST_ANY:
2856    case SYMBOL_REF:
2857    case LABEL_REF:
2858    case ADDR_VEC:
2859    case ADDR_DIFF_VEC:
2860      return x;
2861
2862    case REG:
2863      {
2864	int first;
2865	int q;
2866	struct qty_table_elem *ent;
2867
2868	/* Never replace a hard reg, because hard regs can appear
2869	   in more than one machine mode, and we must preserve the mode
2870	   of each occurrence.  Also, some hard regs appear in
2871	   MEMs that are shared and mustn't be altered.  Don't try to
2872	   replace any reg that maps to a reg of class NO_REGS.  */
2873	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2874	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2875	  return x;
2876
2877	q = REG_QTY (REGNO (x));
2878	ent = &qty_table[q];
2879	first = ent->first_reg;
2880	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2881		: REGNO_REG_CLASS (first) == NO_REGS ? x
2882		: gen_rtx_REG (ent->mode, first));
2883      }
2884
2885    default:
2886      break;
2887    }
2888
2889  fmt = GET_RTX_FORMAT (code);
2890  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2891    {
2892      int j;
2893
2894      if (fmt[i] == 'e')
2895	validate_canon_reg (&XEXP (x, i), insn);
2896      else if (fmt[i] == 'E')
2897	for (j = 0; j < XVECLEN (x, i); j++)
2898	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2899    }
2900
2901  return x;
2902}
2903
2904/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2905   operation (EQ, NE, GT, etc.), follow it back through the hash table and
2906   what values are being compared.
2907
2908   *PARG1 and *PARG2 are updated to contain the rtx representing the values
2909   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
2910   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2911   compared to produce cc0.
2912
2913   The return value is the comparison operator and is either the code of
2914   A or the code corresponding to the inverse of the comparison.  */
2915
2916static enum rtx_code
2917find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2918		      machine_mode *pmode1, machine_mode *pmode2)
2919{
2920  rtx arg1, arg2;
2921  hash_set<rtx> *visited = NULL;
2922  /* Set nonzero when we find something of interest.  */
2923  rtx x = NULL;
2924
2925  arg1 = *parg1, arg2 = *parg2;
2926
2927  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
2928
2929  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2930    {
2931      int reverse_code = 0;
2932      struct table_elt *p = 0;
2933
2934      /* Remember state from previous iteration.  */
2935      if (x)
2936	{
2937	  if (!visited)
2938	    visited = new hash_set<rtx>;
2939	  visited->add (x);
2940	  x = 0;
2941	}
2942
2943      /* If arg1 is a COMPARE, extract the comparison arguments from it.
2944	 On machines with CC0, this is the only case that can occur, since
2945	 fold_rtx will return the COMPARE or item being compared with zero
2946	 when given CC0.  */
2947
2948      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2949	x = arg1;
2950
2951      /* If ARG1 is a comparison operator and CODE is testing for
2952	 STORE_FLAG_VALUE, get the inner arguments.  */
2953
2954      else if (COMPARISON_P (arg1))
2955	{
2956#ifdef FLOAT_STORE_FLAG_VALUE
2957	  REAL_VALUE_TYPE fsfv;
2958#endif
2959
2960	  if (code == NE
2961	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2962		  && code == LT && STORE_FLAG_VALUE == -1)
2963#ifdef FLOAT_STORE_FLAG_VALUE
2964	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2965		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2966		      REAL_VALUE_NEGATIVE (fsfv)))
2967#endif
2968	      )
2969	    x = arg1;
2970	  else if (code == EQ
2971		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2972		       && code == GE && STORE_FLAG_VALUE == -1)
2973#ifdef FLOAT_STORE_FLAG_VALUE
2974		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2975		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2976			   REAL_VALUE_NEGATIVE (fsfv)))
2977#endif
2978		   )
2979	    x = arg1, reverse_code = 1;
2980	}
2981
2982      /* ??? We could also check for
2983
2984	 (ne (and (eq (...) (const_int 1))) (const_int 0))
2985
2986	 and related forms, but let's wait until we see them occurring.  */
2987
2988      if (x == 0)
2989	/* Look up ARG1 in the hash table and see if it has an equivalence
2990	   that lets us see what is being compared.  */
2991	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
2992      if (p)
2993	{
2994	  p = p->first_same_value;
2995
2996	  /* If what we compare is already known to be constant, that is as
2997	     good as it gets.
2998	     We need to break the loop in this case, because otherwise we
2999	     can have an infinite loop when looking at a reg that is known
3000	     to be a constant which is the same as a comparison of a reg
3001	     against zero which appears later in the insn stream, which in
3002	     turn is constant and the same as the comparison of the first reg
3003	     against zero...  */
3004	  if (p->is_const)
3005	    break;
3006	}
3007
3008      for (; p; p = p->next_same_value)
3009	{
3010	  machine_mode inner_mode = GET_MODE (p->exp);
3011#ifdef FLOAT_STORE_FLAG_VALUE
3012	  REAL_VALUE_TYPE fsfv;
3013#endif
3014
3015	  /* If the entry isn't valid, skip it.  */
3016	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3017	    continue;
3018
3019	  /* If it's a comparison we've used before, skip it.  */
3020	  if (visited && visited->contains (p->exp))
3021	    continue;
3022
3023	  if (GET_CODE (p->exp) == COMPARE
3024	      /* Another possibility is that this machine has a compare insn
3025		 that includes the comparison code.  In that case, ARG1 would
3026		 be equivalent to a comparison operation that would set ARG1 to
3027		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3028		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3029		 we must reverse ORIG_CODE.  On machine with a negative value
3030		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3031	      || ((code == NE
3032		   || (code == LT
3033		       && val_signbit_known_set_p (inner_mode,
3034						   STORE_FLAG_VALUE))
3035#ifdef FLOAT_STORE_FLAG_VALUE
3036		   || (code == LT
3037		       && SCALAR_FLOAT_MODE_P (inner_mode)
3038		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3039			   REAL_VALUE_NEGATIVE (fsfv)))
3040#endif
3041		   )
3042		  && COMPARISON_P (p->exp)))
3043	    {
3044	      x = p->exp;
3045	      break;
3046	    }
3047	  else if ((code == EQ
3048		    || (code == GE
3049			&& val_signbit_known_set_p (inner_mode,
3050						    STORE_FLAG_VALUE))
3051#ifdef FLOAT_STORE_FLAG_VALUE
3052		    || (code == GE
3053			&& SCALAR_FLOAT_MODE_P (inner_mode)
3054			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3055			    REAL_VALUE_NEGATIVE (fsfv)))
3056#endif
3057		    )
3058		   && COMPARISON_P (p->exp))
3059	    {
3060	      reverse_code = 1;
3061	      x = p->exp;
3062	      break;
3063	    }
3064
3065	  /* If this non-trapping address, e.g. fp + constant, the
3066	     equivalent is a better operand since it may let us predict
3067	     the value of the comparison.  */
3068	  else if (!rtx_addr_can_trap_p (p->exp))
3069	    {
3070	      arg1 = p->exp;
3071	      continue;
3072	    }
3073	}
3074
3075      /* If we didn't find a useful equivalence for ARG1, we are done.
3076	 Otherwise, set up for the next iteration.  */
3077      if (x == 0)
3078	break;
3079
3080      /* If we need to reverse the comparison, make sure that that is
3081	 possible -- we can't necessarily infer the value of GE from LT
3082	 with floating-point operands.  */
3083      if (reverse_code)
3084	{
3085	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3086	  if (reversed == UNKNOWN)
3087	    break;
3088	  else
3089	    code = reversed;
3090	}
3091      else if (COMPARISON_P (x))
3092	code = GET_CODE (x);
3093      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3094    }
3095
3096  /* Return our results.  Return the modes from before fold_rtx
3097     because fold_rtx might produce const_int, and then it's too late.  */
3098  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3099  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3100
3101  if (visited)
3102    delete visited;
3103  return code;
3104}
3105
3106/* If X is a nontrivial arithmetic operation on an argument for which
3107   a constant value can be determined, return the result of operating
3108   on that value, as a constant.  Otherwise, return X, possibly with
3109   one or more operands changed to a forward-propagated constant.
3110
3111   If X is a register whose contents are known, we do NOT return
3112   those contents here; equiv_constant is called to perform that task.
3113   For SUBREGs and MEMs, we do that both here and in equiv_constant.
3114
3115   INSN is the insn that we may be modifying.  If it is 0, make a copy
3116   of X before modifying it.  */
3117
3118static rtx
3119fold_rtx (rtx x, rtx_insn *insn)
3120{
3121  enum rtx_code code;
3122  machine_mode mode;
3123  const char *fmt;
3124  int i;
3125  rtx new_rtx = 0;
3126  int changed = 0;
3127
3128  /* Operands of X.  */
3129  /* Workaround -Wmaybe-uninitialized false positive during
3130     profiledbootstrap by initializing them.  */
3131  rtx folded_arg0 = NULL_RTX;
3132  rtx folded_arg1 = NULL_RTX;
3133
3134  /* Constant equivalents of first three operands of X;
3135     0 when no such equivalent is known.  */
3136  rtx const_arg0;
3137  rtx const_arg1;
3138  rtx const_arg2;
3139
3140  /* The mode of the first operand of X.  We need this for sign and zero
3141     extends.  */
3142  machine_mode mode_arg0;
3143
3144  if (x == 0)
3145    return x;
3146
3147  /* Try to perform some initial simplifications on X.  */
3148  code = GET_CODE (x);
3149  switch (code)
3150    {
3151    case MEM:
3152    case SUBREG:
3153      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3154        return new_rtx;
3155      return x;
3156
3157    case CONST:
3158    CASE_CONST_ANY:
3159    case SYMBOL_REF:
3160    case LABEL_REF:
3161    case REG:
3162    case PC:
3163      /* No use simplifying an EXPR_LIST
3164	 since they are used only for lists of args
3165	 in a function call's REG_EQUAL note.  */
3166    case EXPR_LIST:
3167      return x;
3168
3169#ifdef HAVE_cc0
3170    case CC0:
3171      return prev_insn_cc0;
3172#endif
3173
3174    case ASM_OPERANDS:
3175      if (insn)
3176	{
3177	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3178	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3179			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3180	}
3181      return x;
3182
3183#ifdef NO_FUNCTION_CSE
3184    case CALL:
3185      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3186	return x;
3187      break;
3188#endif
3189
3190    /* Anything else goes through the loop below.  */
3191    default:
3192      break;
3193    }
3194
3195  mode = GET_MODE (x);
3196  const_arg0 = 0;
3197  const_arg1 = 0;
3198  const_arg2 = 0;
3199  mode_arg0 = VOIDmode;
3200
3201  /* Try folding our operands.
3202     Then see which ones have constant values known.  */
3203
3204  fmt = GET_RTX_FORMAT (code);
3205  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3206    if (fmt[i] == 'e')
3207      {
3208	rtx folded_arg = XEXP (x, i), const_arg;
3209	machine_mode mode_arg = GET_MODE (folded_arg);
3210
3211	switch (GET_CODE (folded_arg))
3212	  {
3213	  case MEM:
3214	  case REG:
3215	  case SUBREG:
3216	    const_arg = equiv_constant (folded_arg);
3217	    break;
3218
3219	  case CONST:
3220	  CASE_CONST_ANY:
3221	  case SYMBOL_REF:
3222	  case LABEL_REF:
3223	    const_arg = folded_arg;
3224	    break;
3225
3226#ifdef HAVE_cc0
3227	  case CC0:
3228	    /* The cc0-user and cc0-setter may be in different blocks if
3229	       the cc0-setter potentially traps.  In that case PREV_INSN_CC0
3230	       will have been cleared as we exited the block with the
3231	       setter.
3232
3233	       While we could potentially track cc0 in this case, it just
3234	       doesn't seem to be worth it given that cc0 targets are not
3235	       terribly common or important these days and trapping math
3236	       is rarely used.  The combination of those two conditions
3237	       necessary to trip this situation is exceedingly rare in the
3238	       real world.  */
3239	    if (!prev_insn_cc0)
3240	      {
3241		const_arg = NULL_RTX;
3242	      }
3243	    else
3244	      {
3245		folded_arg = prev_insn_cc0;
3246		mode_arg = prev_insn_cc0_mode;
3247		const_arg = equiv_constant (folded_arg);
3248	      }
3249	    break;
3250#endif
3251
3252	  default:
3253	    folded_arg = fold_rtx (folded_arg, insn);
3254	    const_arg = equiv_constant (folded_arg);
3255	    break;
3256	  }
3257
3258	/* For the first three operands, see if the operand
3259	   is constant or equivalent to a constant.  */
3260	switch (i)
3261	  {
3262	  case 0:
3263	    folded_arg0 = folded_arg;
3264	    const_arg0 = const_arg;
3265	    mode_arg0 = mode_arg;
3266	    break;
3267	  case 1:
3268	    folded_arg1 = folded_arg;
3269	    const_arg1 = const_arg;
3270	    break;
3271	  case 2:
3272	    const_arg2 = const_arg;
3273	    break;
3274	  }
3275
3276	/* Pick the least expensive of the argument and an equivalent constant
3277	   argument.  */
3278	if (const_arg != 0
3279	    && const_arg != folded_arg
3280	    && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i)
3281
3282	    /* It's not safe to substitute the operand of a conversion
3283	       operator with a constant, as the conversion's identity
3284	       depends upon the mode of its operand.  This optimization
3285	       is handled by the call to simplify_unary_operation.  */
3286	    && (GET_RTX_CLASS (code) != RTX_UNARY
3287		|| GET_MODE (const_arg) == mode_arg0
3288		|| (code != ZERO_EXTEND
3289		    && code != SIGN_EXTEND
3290		    && code != TRUNCATE
3291		    && code != FLOAT_TRUNCATE
3292		    && code != FLOAT_EXTEND
3293		    && code != FLOAT
3294		    && code != FIX
3295		    && code != UNSIGNED_FLOAT
3296		    && code != UNSIGNED_FIX)))
3297	  folded_arg = const_arg;
3298
3299	if (folded_arg == XEXP (x, i))
3300	  continue;
3301
3302	if (insn == NULL_RTX && !changed)
3303	  x = copy_rtx (x);
3304	changed = 1;
3305	validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3306      }
3307
3308  if (changed)
3309    {
3310      /* Canonicalize X if necessary, and keep const_argN and folded_argN
3311	 consistent with the order in X.  */
3312      if (canonicalize_change_group (insn, x))
3313	{
3314	  rtx tem;
3315	  tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3316	  tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3317	}
3318
3319      apply_change_group ();
3320    }
3321
3322  /* If X is an arithmetic operation, see if we can simplify it.  */
3323
3324  switch (GET_RTX_CLASS (code))
3325    {
3326    case RTX_UNARY:
3327      {
3328	/* We can't simplify extension ops unless we know the
3329	   original mode.  */
3330	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3331	    && mode_arg0 == VOIDmode)
3332	  break;
3333
3334	new_rtx = simplify_unary_operation (code, mode,
3335					    const_arg0 ? const_arg0 : folded_arg0,
3336					    mode_arg0);
3337      }
3338      break;
3339
3340    case RTX_COMPARE:
3341    case RTX_COMM_COMPARE:
3342      /* See what items are actually being compared and set FOLDED_ARG[01]
3343	 to those values and CODE to the actual comparison code.  If any are
3344	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3345	 do anything if both operands are already known to be constant.  */
3346
3347      /* ??? Vector mode comparisons are not supported yet.  */
3348      if (VECTOR_MODE_P (mode))
3349	break;
3350
3351      if (const_arg0 == 0 || const_arg1 == 0)
3352	{
3353	  struct table_elt *p0, *p1;
3354	  rtx true_rtx, false_rtx;
3355	  machine_mode mode_arg1;
3356
3357	  if (SCALAR_FLOAT_MODE_P (mode))
3358	    {
3359#ifdef FLOAT_STORE_FLAG_VALUE
3360	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3361			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3362#else
3363	      true_rtx = NULL_RTX;
3364#endif
3365	      false_rtx = CONST0_RTX (mode);
3366	    }
3367	  else
3368	    {
3369	      true_rtx = const_true_rtx;
3370	      false_rtx = const0_rtx;
3371	    }
3372
3373	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3374				       &mode_arg0, &mode_arg1);
3375
3376	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3377	     what kinds of things are being compared, so we can't do
3378	     anything with this comparison.  */
3379
3380	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3381	    break;
3382
3383	  const_arg0 = equiv_constant (folded_arg0);
3384	  const_arg1 = equiv_constant (folded_arg1);
3385
3386	  /* If we do not now have two constants being compared, see
3387	     if we can nevertheless deduce some things about the
3388	     comparison.  */
3389	  if (const_arg0 == 0 || const_arg1 == 0)
3390	    {
3391	      if (const_arg1 != NULL)
3392		{
3393		  rtx cheapest_simplification;
3394		  int cheapest_cost;
3395		  rtx simp_result;
3396		  struct table_elt *p;
3397
3398		  /* See if we can find an equivalent of folded_arg0
3399		     that gets us a cheaper expression, possibly a
3400		     constant through simplifications.  */
3401		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3402			      mode_arg0);
3403
3404		  if (p != NULL)
3405		    {
3406		      cheapest_simplification = x;
3407		      cheapest_cost = COST (x);
3408
3409		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3410			{
3411			  int cost;
3412
3413			  /* If the entry isn't valid, skip it.  */
3414			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3415			    continue;
3416
3417			  /* Try to simplify using this equivalence.  */
3418			  simp_result
3419			    = simplify_relational_operation (code, mode,
3420							     mode_arg0,
3421							     p->exp,
3422							     const_arg1);
3423
3424			  if (simp_result == NULL)
3425			    continue;
3426
3427			  cost = COST (simp_result);
3428			  if (cost < cheapest_cost)
3429			    {
3430			      cheapest_cost = cost;
3431			      cheapest_simplification = simp_result;
3432			    }
3433			}
3434
3435		      /* If we have a cheaper expression now, use that
3436			 and try folding it further, from the top.  */
3437		      if (cheapest_simplification != x)
3438			return fold_rtx (copy_rtx (cheapest_simplification),
3439					 insn);
3440		    }
3441		}
3442
3443	      /* See if the two operands are the same.  */
3444
3445	      if ((REG_P (folded_arg0)
3446		   && REG_P (folded_arg1)
3447		   && (REG_QTY (REGNO (folded_arg0))
3448		       == REG_QTY (REGNO (folded_arg1))))
3449		  || ((p0 = lookup (folded_arg0,
3450				    SAFE_HASH (folded_arg0, mode_arg0),
3451				    mode_arg0))
3452		      && (p1 = lookup (folded_arg1,
3453				       SAFE_HASH (folded_arg1, mode_arg0),
3454				       mode_arg0))
3455		      && p0->first_same_value == p1->first_same_value))
3456		folded_arg1 = folded_arg0;
3457
3458	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3459		 doing now is either the same as we did before or the reverse
3460		 (we only check the reverse if not floating-point).  */
3461	      else if (REG_P (folded_arg0))
3462		{
3463		  int qty = REG_QTY (REGNO (folded_arg0));
3464
3465		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3466		    {
3467		      struct qty_table_elem *ent = &qty_table[qty];
3468
3469		      if ((comparison_dominates_p (ent->comparison_code, code)
3470			   || (! FLOAT_MODE_P (mode_arg0)
3471			       && comparison_dominates_p (ent->comparison_code,
3472						          reverse_condition (code))))
3473			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3474			      || (const_arg1
3475				  && rtx_equal_p (ent->comparison_const,
3476						  const_arg1))
3477			      || (REG_P (folded_arg1)
3478				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3479			{
3480			  if (comparison_dominates_p (ent->comparison_code, code))
3481			    {
3482			      if (true_rtx)
3483				return true_rtx;
3484			      else
3485				break;
3486			    }
3487			  else
3488			    return false_rtx;
3489			}
3490		    }
3491		}
3492	    }
3493	}
3494
3495      /* If we are comparing against zero, see if the first operand is
3496	 equivalent to an IOR with a constant.  If so, we may be able to
3497	 determine the result of this comparison.  */
3498      if (const_arg1 == const0_rtx && !const_arg0)
3499	{
3500	  rtx y = lookup_as_function (folded_arg0, IOR);
3501	  rtx inner_const;
3502
3503	  if (y != 0
3504	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3505	      && CONST_INT_P (inner_const)
3506	      && INTVAL (inner_const) != 0)
3507	    folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3508	}
3509
3510      {
3511	rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3512	rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3513	new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3514						 op0, op1);
3515      }
3516      break;
3517
3518    case RTX_BIN_ARITH:
3519    case RTX_COMM_ARITH:
3520      switch (code)
3521	{
3522	case PLUS:
3523	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
3524	     with that LABEL_REF as its second operand.  If so, the result is
3525	     the first operand of that MINUS.  This handles switches with an
3526	     ADDR_DIFF_VEC table.  */
3527	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3528	    {
3529	      rtx y
3530		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
3531		: lookup_as_function (folded_arg0, MINUS);
3532
3533	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3534		  && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg1))
3535		return XEXP (y, 0);
3536
3537	      /* Now try for a CONST of a MINUS like the above.  */
3538	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3539			: lookup_as_function (folded_arg0, CONST))) != 0
3540		  && GET_CODE (XEXP (y, 0)) == MINUS
3541		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3542		  && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg1))
3543		return XEXP (XEXP (y, 0), 0);
3544	    }
3545
3546	  /* Likewise if the operands are in the other order.  */
3547	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3548	    {
3549	      rtx y
3550		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
3551		: lookup_as_function (folded_arg1, MINUS);
3552
3553	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3554		  && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg0))
3555		return XEXP (y, 0);
3556
3557	      /* Now try for a CONST of a MINUS like the above.  */
3558	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3559			: lookup_as_function (folded_arg1, CONST))) != 0
3560		  && GET_CODE (XEXP (y, 0)) == MINUS
3561		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3562		  && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg0))
3563		return XEXP (XEXP (y, 0), 0);
3564	    }
3565
3566	  /* If second operand is a register equivalent to a negative
3567	     CONST_INT, see if we can find a register equivalent to the
3568	     positive constant.  Make a MINUS if so.  Don't do this for
3569	     a non-negative constant since we might then alternate between
3570	     choosing positive and negative constants.  Having the positive
3571	     constant previously-used is the more common case.  Be sure
3572	     the resulting constant is non-negative; if const_arg1 were
3573	     the smallest negative number this would overflow: depending
3574	     on the mode, this would either just be the same value (and
3575	     hence not save anything) or be incorrect.  */
3576	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3577	      && INTVAL (const_arg1) < 0
3578	      /* This used to test
3579
3580	         -INTVAL (const_arg1) >= 0
3581
3582		 But The Sun V5.0 compilers mis-compiled that test.  So
3583		 instead we test for the problematic value in a more direct
3584		 manner and hope the Sun compilers get it correct.  */
3585	      && INTVAL (const_arg1) !=
3586	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3587	      && REG_P (folded_arg1))
3588	    {
3589	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
3590	      struct table_elt *p
3591		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
3592
3593	      if (p)
3594		for (p = p->first_same_value; p; p = p->next_same_value)
3595		  if (REG_P (p->exp))
3596		    return simplify_gen_binary (MINUS, mode, folded_arg0,
3597						canon_reg (p->exp, NULL));
3598	    }
3599	  goto from_plus;
3600
3601	case MINUS:
3602	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3603	     If so, produce (PLUS Z C2-C).  */
3604	  if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3605	    {
3606	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3607	      if (y && CONST_INT_P (XEXP (y, 1)))
3608		return fold_rtx (plus_constant (mode, copy_rtx (y),
3609						-INTVAL (const_arg1)),
3610				 NULL);
3611	    }
3612
3613	  /* Fall through.  */
3614
3615	from_plus:
3616	case SMIN:    case SMAX:      case UMIN:    case UMAX:
3617	case IOR:     case AND:       case XOR:
3618	case MULT:
3619	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
3620	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3621	     is known to be of similar form, we may be able to replace the
3622	     operation with a combined operation.  This may eliminate the
3623	     intermediate operation if every use is simplified in this way.
3624	     Note that the similar optimization done by combine.c only works
3625	     if the intermediate operation's result has only one reference.  */
3626
3627	  if (REG_P (folded_arg0)
3628	      && const_arg1 && CONST_INT_P (const_arg1))
3629	    {
3630	      int is_shift
3631		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3632	      rtx y, inner_const, new_const;
3633	      rtx canon_const_arg1 = const_arg1;
3634	      enum rtx_code associate_code;
3635
3636	      if (is_shift
3637		  && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
3638		      || INTVAL (const_arg1) < 0))
3639		{
3640		  if (SHIFT_COUNT_TRUNCATED)
3641		    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3642						& (GET_MODE_BITSIZE (mode)
3643						   - 1));
3644		  else
3645		    break;
3646		}
3647
3648	      y = lookup_as_function (folded_arg0, code);
3649	      if (y == 0)
3650		break;
3651
3652	      /* If we have compiled a statement like
3653		 "if (x == (x & mask1))", and now are looking at
3654		 "x & mask2", we will have a case where the first operand
3655		 of Y is the same as our first operand.  Unless we detect
3656		 this case, an infinite loop will result.  */
3657	      if (XEXP (y, 0) == folded_arg0)
3658		break;
3659
3660	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3661	      if (!inner_const || !CONST_INT_P (inner_const))
3662		break;
3663
3664	      /* Don't associate these operations if they are a PLUS with the
3665		 same constant and it is a power of two.  These might be doable
3666		 with a pre- or post-increment.  Similarly for two subtracts of
3667		 identical powers of two with post decrement.  */
3668
3669	      if (code == PLUS && const_arg1 == inner_const
3670		  && ((HAVE_PRE_INCREMENT
3671			  && exact_log2 (INTVAL (const_arg1)) >= 0)
3672		      || (HAVE_POST_INCREMENT
3673			  && exact_log2 (INTVAL (const_arg1)) >= 0)
3674		      || (HAVE_PRE_DECREMENT
3675			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
3676		      || (HAVE_POST_DECREMENT
3677			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3678		break;
3679
3680	      /* ??? Vector mode shifts by scalar
3681		 shift operand are not supported yet.  */
3682	      if (is_shift && VECTOR_MODE_P (mode))
3683                break;
3684
3685	      if (is_shift
3686		  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
3687		      || INTVAL (inner_const) < 0))
3688		{
3689		  if (SHIFT_COUNT_TRUNCATED)
3690		    inner_const = GEN_INT (INTVAL (inner_const)
3691					   & (GET_MODE_BITSIZE (mode) - 1));
3692		  else
3693		    break;
3694		}
3695
3696	      /* Compute the code used to compose the constants.  For example,
3697		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
3698
3699	      associate_code = (is_shift || code == MINUS ? PLUS : code);
3700
3701	      new_const = simplify_binary_operation (associate_code, mode,
3702						     canon_const_arg1,
3703						     inner_const);
3704
3705	      if (new_const == 0)
3706		break;
3707
3708	      /* If we are associating shift operations, don't let this
3709		 produce a shift of the size of the object or larger.
3710		 This could occur when we follow a sign-extend by a right
3711		 shift on a machine that does a sign-extend as a pair
3712		 of shifts.  */
3713
3714	      if (is_shift
3715		  && CONST_INT_P (new_const)
3716		  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
3717		{
3718		  /* As an exception, we can turn an ASHIFTRT of this
3719		     form into a shift of the number of bits - 1.  */
3720		  if (code == ASHIFTRT)
3721		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3722		  else if (!side_effects_p (XEXP (y, 0)))
3723		    return CONST0_RTX (mode);
3724		  else
3725		    break;
3726		}
3727
3728	      y = copy_rtx (XEXP (y, 0));
3729
3730	      /* If Y contains our first operand (the most common way this
3731		 can happen is if Y is a MEM), we would do into an infinite
3732		 loop if we tried to fold it.  So don't in that case.  */
3733
3734	      if (! reg_mentioned_p (folded_arg0, y))
3735		y = fold_rtx (y, insn);
3736
3737	      return simplify_gen_binary (code, mode, y, new_const);
3738	    }
3739	  break;
3740
3741	case DIV:       case UDIV:
3742	  /* ??? The associative optimization performed immediately above is
3743	     also possible for DIV and UDIV using associate_code of MULT.
3744	     However, we would need extra code to verify that the
3745	     multiplication does not overflow, that is, there is no overflow
3746	     in the calculation of new_const.  */
3747	  break;
3748
3749	default:
3750	  break;
3751	}
3752
3753      new_rtx = simplify_binary_operation (code, mode,
3754				       const_arg0 ? const_arg0 : folded_arg0,
3755				       const_arg1 ? const_arg1 : folded_arg1);
3756      break;
3757
3758    case RTX_OBJ:
3759      /* (lo_sum (high X) X) is simply X.  */
3760      if (code == LO_SUM && const_arg0 != 0
3761	  && GET_CODE (const_arg0) == HIGH
3762	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3763	return const_arg1;
3764      break;
3765
3766    case RTX_TERNARY:
3767    case RTX_BITFIELD_OPS:
3768      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3769					const_arg0 ? const_arg0 : folded_arg0,
3770					const_arg1 ? const_arg1 : folded_arg1,
3771					const_arg2 ? const_arg2 : XEXP (x, 2));
3772      break;
3773
3774    default:
3775      break;
3776    }
3777
3778  return new_rtx ? new_rtx : x;
3779}
3780
3781/* Return a constant value currently equivalent to X.
3782   Return 0 if we don't know one.  */
3783
3784static rtx
3785equiv_constant (rtx x)
3786{
3787  if (REG_P (x)
3788      && REGNO_QTY_VALID_P (REGNO (x)))
3789    {
3790      int x_q = REG_QTY (REGNO (x));
3791      struct qty_table_elem *x_ent = &qty_table[x_q];
3792
3793      if (x_ent->const_rtx)
3794	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3795    }
3796
3797  if (x == 0 || CONSTANT_P (x))
3798    return x;
3799
3800  if (GET_CODE (x) == SUBREG)
3801    {
3802      machine_mode mode = GET_MODE (x);
3803      machine_mode imode = GET_MODE (SUBREG_REG (x));
3804      rtx new_rtx;
3805
3806      /* See if we previously assigned a constant value to this SUBREG.  */
3807      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3808	  || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
3809          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3810          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3811        return new_rtx;
3812
3813      /* If we didn't and if doing so makes sense, see if we previously
3814	 assigned a constant value to the enclosing word mode SUBREG.  */
3815      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3816	  && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3817	{
3818	  int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3819	  if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3820	    {
3821	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3822	      new_rtx = lookup_as_function (y, CONST_INT);
3823	      if (new_rtx)
3824		return gen_lowpart (mode, new_rtx);
3825	    }
3826	}
3827
3828      /* Otherwise see if we already have a constant for the inner REG,
3829	 and if that is enough to calculate an equivalent constant for
3830	 the subreg.  Note that the upper bits of paradoxical subregs
3831	 are undefined, so they cannot be said to equal anything.  */
3832      if (REG_P (SUBREG_REG (x))
3833	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (imode)
3834	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3835        return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3836
3837      return 0;
3838    }
3839
3840  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3841     the hash table in case its value was seen before.  */
3842
3843  if (MEM_P (x))
3844    {
3845      struct table_elt *elt;
3846
3847      x = avoid_constant_pool_reference (x);
3848      if (CONSTANT_P (x))
3849	return x;
3850
3851      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3852      if (elt == 0)
3853	return 0;
3854
3855      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3856	if (elt->is_const && CONSTANT_P (elt->exp))
3857	  return elt->exp;
3858    }
3859
3860  return 0;
3861}
3862
3863/* Given INSN, a jump insn, TAKEN indicates if we are following the
3864   "taken" branch.
3865
3866   In certain cases, this can cause us to add an equivalence.  For example,
3867   if we are following the taken case of
3868	if (i == 2)
3869   we can add the fact that `i' and '2' are now equivalent.
3870
3871   In any case, we can record that this comparison was passed.  If the same
3872   comparison is seen later, we will know its value.  */
3873
3874static void
3875record_jump_equiv (rtx_insn *insn, bool taken)
3876{
3877  int cond_known_true;
3878  rtx op0, op1;
3879  rtx set;
3880  machine_mode mode, mode0, mode1;
3881  int reversed_nonequality = 0;
3882  enum rtx_code code;
3883
3884  /* Ensure this is the right kind of insn.  */
3885  gcc_assert (any_condjump_p (insn));
3886
3887  set = pc_set (insn);
3888
3889  /* See if this jump condition is known true or false.  */
3890  if (taken)
3891    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3892  else
3893    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3894
3895  /* Get the type of comparison being done and the operands being compared.
3896     If we had to reverse a non-equality condition, record that fact so we
3897     know that it isn't valid for floating-point.  */
3898  code = GET_CODE (XEXP (SET_SRC (set), 0));
3899  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3900  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3901
3902  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3903  if (! cond_known_true)
3904    {
3905      code = reversed_comparison_code_parts (code, op0, op1, insn);
3906
3907      /* Don't remember if we can't find the inverse.  */
3908      if (code == UNKNOWN)
3909	return;
3910    }
3911
3912  /* The mode is the mode of the non-constant.  */
3913  mode = mode0;
3914  if (mode1 != VOIDmode)
3915    mode = mode1;
3916
3917  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3918}
3919
3920/* Yet another form of subreg creation.  In this case, we want something in
3921   MODE, and we should assume OP has MODE iff it is naturally modeless.  */
3922
3923static rtx
3924record_jump_cond_subreg (machine_mode mode, rtx op)
3925{
3926  machine_mode op_mode = GET_MODE (op);
3927  if (op_mode == mode || op_mode == VOIDmode)
3928    return op;
3929  return lowpart_subreg (mode, op, op_mode);
3930}
3931
3932/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3933   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3934   Make any useful entries we can with that information.  Called from
3935   above function and called recursively.  */
3936
3937static void
3938record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
3939		  rtx op1, int reversed_nonequality)
3940{
3941  unsigned op0_hash, op1_hash;
3942  int op0_in_memory, op1_in_memory;
3943  struct table_elt *op0_elt, *op1_elt;
3944
3945  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3946     we know that they are also equal in the smaller mode (this is also
3947     true for all smaller modes whether or not there is a SUBREG, but
3948     is not worth testing for with no SUBREG).  */
3949
3950  /* Note that GET_MODE (op0) may not equal MODE.  */
3951  if (code == EQ && paradoxical_subreg_p (op0))
3952    {
3953      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3954      rtx tem = record_jump_cond_subreg (inner_mode, op1);
3955      if (tem)
3956	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3957			  reversed_nonequality);
3958    }
3959
3960  if (code == EQ && paradoxical_subreg_p (op1))
3961    {
3962      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3963      rtx tem = record_jump_cond_subreg (inner_mode, op0);
3964      if (tem)
3965	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3966			  reversed_nonequality);
3967    }
3968
3969  /* Similarly, if this is an NE comparison, and either is a SUBREG
3970     making a smaller mode, we know the whole thing is also NE.  */
3971
3972  /* Note that GET_MODE (op0) may not equal MODE;
3973     if we test MODE instead, we can get an infinite recursion
3974     alternating between two modes each wider than MODE.  */
3975
3976  if (code == NE && GET_CODE (op0) == SUBREG
3977      && subreg_lowpart_p (op0)
3978      && (GET_MODE_SIZE (GET_MODE (op0))
3979	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3980    {
3981      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3982      rtx tem = record_jump_cond_subreg (inner_mode, op1);
3983      if (tem)
3984	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3985			  reversed_nonequality);
3986    }
3987
3988  if (code == NE && GET_CODE (op1) == SUBREG
3989      && subreg_lowpart_p (op1)
3990      && (GET_MODE_SIZE (GET_MODE (op1))
3991	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3992    {
3993      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3994      rtx tem = record_jump_cond_subreg (inner_mode, op0);
3995      if (tem)
3996	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3997			  reversed_nonequality);
3998    }
3999
4000  /* Hash both operands.  */
4001
4002  do_not_record = 0;
4003  hash_arg_in_memory = 0;
4004  op0_hash = HASH (op0, mode);
4005  op0_in_memory = hash_arg_in_memory;
4006
4007  if (do_not_record)
4008    return;
4009
4010  do_not_record = 0;
4011  hash_arg_in_memory = 0;
4012  op1_hash = HASH (op1, mode);
4013  op1_in_memory = hash_arg_in_memory;
4014
4015  if (do_not_record)
4016    return;
4017
4018  /* Look up both operands.  */
4019  op0_elt = lookup (op0, op0_hash, mode);
4020  op1_elt = lookup (op1, op1_hash, mode);
4021
4022  /* If both operands are already equivalent or if they are not in the
4023     table but are identical, do nothing.  */
4024  if ((op0_elt != 0 && op1_elt != 0
4025       && op0_elt->first_same_value == op1_elt->first_same_value)
4026      || op0 == op1 || rtx_equal_p (op0, op1))
4027    return;
4028
4029  /* If we aren't setting two things equal all we can do is save this
4030     comparison.   Similarly if this is floating-point.  In the latter
4031     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4032     If we record the equality, we might inadvertently delete code
4033     whose intent was to change -0 to +0.  */
4034
4035  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4036    {
4037      struct qty_table_elem *ent;
4038      int qty;
4039
4040      /* If we reversed a floating-point comparison, if OP0 is not a
4041	 register, or if OP1 is neither a register or constant, we can't
4042	 do anything.  */
4043
4044      if (!REG_P (op1))
4045	op1 = equiv_constant (op1);
4046
4047      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4048	  || !REG_P (op0) || op1 == 0)
4049	return;
4050
4051      /* Put OP0 in the hash table if it isn't already.  This gives it a
4052	 new quantity number.  */
4053      if (op0_elt == 0)
4054	{
4055	  if (insert_regs (op0, NULL, 0))
4056	    {
4057	      rehash_using_reg (op0);
4058	      op0_hash = HASH (op0, mode);
4059
4060	      /* If OP0 is contained in OP1, this changes its hash code
4061		 as well.  Faster to rehash than to check, except
4062		 for the simple case of a constant.  */
4063	      if (! CONSTANT_P (op1))
4064		op1_hash = HASH (op1,mode);
4065	    }
4066
4067	  op0_elt = insert (op0, NULL, op0_hash, mode);
4068	  op0_elt->in_memory = op0_in_memory;
4069	}
4070
4071      qty = REG_QTY (REGNO (op0));
4072      ent = &qty_table[qty];
4073
4074      ent->comparison_code = code;
4075      if (REG_P (op1))
4076	{
4077	  /* Look it up again--in case op0 and op1 are the same.  */
4078	  op1_elt = lookup (op1, op1_hash, mode);
4079
4080	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4081	  if (op1_elt == 0)
4082	    {
4083	      if (insert_regs (op1, NULL, 0))
4084		{
4085		  rehash_using_reg (op1);
4086		  op1_hash = HASH (op1, mode);
4087		}
4088
4089	      op1_elt = insert (op1, NULL, op1_hash, mode);
4090	      op1_elt->in_memory = op1_in_memory;
4091	    }
4092
4093	  ent->comparison_const = NULL_RTX;
4094	  ent->comparison_qty = REG_QTY (REGNO (op1));
4095	}
4096      else
4097	{
4098	  ent->comparison_const = op1;
4099	  ent->comparison_qty = -1;
4100	}
4101
4102      return;
4103    }
4104
4105  /* If either side is still missing an equivalence, make it now,
4106     then merge the equivalences.  */
4107
4108  if (op0_elt == 0)
4109    {
4110      if (insert_regs (op0, NULL, 0))
4111	{
4112	  rehash_using_reg (op0);
4113	  op0_hash = HASH (op0, mode);
4114	}
4115
4116      op0_elt = insert (op0, NULL, op0_hash, mode);
4117      op0_elt->in_memory = op0_in_memory;
4118    }
4119
4120  if (op1_elt == 0)
4121    {
4122      if (insert_regs (op1, NULL, 0))
4123	{
4124	  rehash_using_reg (op1);
4125	  op1_hash = HASH (op1, mode);
4126	}
4127
4128      op1_elt = insert (op1, NULL, op1_hash, mode);
4129      op1_elt->in_memory = op1_in_memory;
4130    }
4131
4132  merge_equiv_classes (op0_elt, op1_elt);
4133}
4134
4135/* CSE processing for one instruction.
4136
4137   Most "true" common subexpressions are mostly optimized away in GIMPLE,
4138   but the few that "leak through" are cleaned up by cse_insn, and complex
4139   addressing modes are often formed here.
4140
4141   The main function is cse_insn, and between here and that function
4142   a couple of helper functions is defined to keep the size of cse_insn
4143   within reasonable proportions.
4144
4145   Data is shared between the main and helper functions via STRUCT SET,
4146   that contains all data related for every set in the instruction that
4147   is being processed.
4148
4149   Note that cse_main processes all sets in the instruction.  Most
4150   passes in GCC only process simple SET insns or single_set insns, but
4151   CSE processes insns with multiple sets as well.  */
4152
4153/* Data on one SET contained in the instruction.  */
4154
4155struct set
4156{
4157  /* The SET rtx itself.  */
4158  rtx rtl;
4159  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4160  rtx src;
4161  /* The hash-table element for the SET_SRC of the SET.  */
4162  struct table_elt *src_elt;
4163  /* Hash value for the SET_SRC.  */
4164  unsigned src_hash;
4165  /* Hash value for the SET_DEST.  */
4166  unsigned dest_hash;
4167  /* The SET_DEST, with SUBREG, etc., stripped.  */
4168  rtx inner_dest;
4169  /* Nonzero if the SET_SRC is in memory.  */
4170  char src_in_memory;
4171  /* Nonzero if the SET_SRC contains something
4172     whose value cannot be predicted and understood.  */
4173  char src_volatile;
4174  /* Original machine mode, in case it becomes a CONST_INT.
4175     The size of this field should match the size of the mode
4176     field of struct rtx_def (see rtl.h).  */
4177  ENUM_BITFIELD(machine_mode) mode : 8;
4178  /* A constant equivalent for SET_SRC, if any.  */
4179  rtx src_const;
4180  /* Hash value of constant equivalent for SET_SRC.  */
4181  unsigned src_const_hash;
4182  /* Table entry for constant equivalent for SET_SRC, if any.  */
4183  struct table_elt *src_const_elt;
4184  /* Table entry for the destination address.  */
4185  struct table_elt *dest_addr_elt;
4186};
4187
4188/* Special handling for (set REG0 REG1) where REG0 is the
4189   "cheapest", cheaper than REG1.  After cse, REG1 will probably not
4190   be used in the sequel, so (if easily done) change this insn to
4191   (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4192   that computed their value.  Then REG1 will become a dead store
4193   and won't cloud the situation for later optimizations.
4194
4195   Do not make this change if REG1 is a hard register, because it will
4196   then be used in the sequel and we may be changing a two-operand insn
4197   into a three-operand insn.
4198
4199   This is the last transformation that cse_insn will try to do.  */
4200
4201static void
4202try_back_substitute_reg (rtx set, rtx_insn *insn)
4203{
4204  rtx dest = SET_DEST (set);
4205  rtx src = SET_SRC (set);
4206
4207  if (REG_P (dest)
4208      && REG_P (src) && ! HARD_REGISTER_P (src)
4209      && REGNO_QTY_VALID_P (REGNO (src)))
4210    {
4211      int src_q = REG_QTY (REGNO (src));
4212      struct qty_table_elem *src_ent = &qty_table[src_q];
4213
4214      if (src_ent->first_reg == REGNO (dest))
4215	{
4216	  /* Scan for the previous nonnote insn, but stop at a basic
4217	     block boundary.  */
4218	  rtx_insn *prev = insn;
4219	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
4220	  do
4221	    {
4222	      prev = PREV_INSN (prev);
4223	    }
4224	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
4225
4226	  /* Do not swap the registers around if the previous instruction
4227	     attaches a REG_EQUIV note to REG1.
4228
4229	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
4230	     from the pseudo that originally shadowed an incoming argument
4231	     to another register.  Some uses of REG_EQUIV might rely on it
4232	     being attached to REG1 rather than REG2.
4233
4234	     This section previously turned the REG_EQUIV into a REG_EQUAL
4235	     note.  We cannot do that because REG_EQUIV may provide an
4236	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
4237	  if (NONJUMP_INSN_P (prev)
4238	      && GET_CODE (PATTERN (prev)) == SET
4239	      && SET_DEST (PATTERN (prev)) == src
4240	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
4241	    {
4242	      rtx note;
4243
4244	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
4245	      validate_change (insn, &SET_DEST (set), src, 1);
4246	      validate_change (insn, &SET_SRC (set), dest, 1);
4247	      apply_change_group ();
4248
4249	      /* If INSN has a REG_EQUAL note, and this note mentions
4250		 REG0, then we must delete it, because the value in
4251		 REG0 has changed.  If the note's value is REG1, we must
4252		 also delete it because that is now this insn's dest.  */
4253	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4254	      if (note != 0
4255		  && (reg_mentioned_p (dest, XEXP (note, 0))
4256		      || rtx_equal_p (src, XEXP (note, 0))))
4257		remove_note (insn, note);
4258	    }
4259	}
4260    }
4261}
4262
4263/* Record all the SETs in this instruction into SETS_PTR,
4264   and return the number of recorded sets.  */
4265static int
4266find_sets_in_insn (rtx_insn *insn, struct set **psets)
4267{
4268  struct set *sets = *psets;
4269  int n_sets = 0;
4270  rtx x = PATTERN (insn);
4271
4272  if (GET_CODE (x) == SET)
4273    {
4274      /* Ignore SETs that are unconditional jumps.
4275	 They never need cse processing, so this does not hurt.
4276	 The reason is not efficiency but rather
4277	 so that we can test at the end for instructions
4278	 that have been simplified to unconditional jumps
4279	 and not be misled by unchanged instructions
4280	 that were unconditional jumps to begin with.  */
4281      if (SET_DEST (x) == pc_rtx
4282	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4283	;
4284      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4285	 The hard function value register is used only once, to copy to
4286	 someplace else, so it isn't worth cse'ing.  */
4287      else if (GET_CODE (SET_SRC (x)) == CALL)
4288	;
4289      else
4290	sets[n_sets++].rtl = x;
4291    }
4292  else if (GET_CODE (x) == PARALLEL)
4293    {
4294      int i, lim = XVECLEN (x, 0);
4295
4296      /* Go over the expressions of the PARALLEL in forward order, to
4297	 put them in the same order in the SETS array.  */
4298      for (i = 0; i < lim; i++)
4299	{
4300	  rtx y = XVECEXP (x, 0, i);
4301	  if (GET_CODE (y) == SET)
4302	    {
4303	      /* As above, we ignore unconditional jumps and call-insns and
4304		 ignore the result of apply_change_group.  */
4305	      if (SET_DEST (y) == pc_rtx
4306		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
4307		;
4308	      else if (GET_CODE (SET_SRC (y)) == CALL)
4309		;
4310	      else
4311		sets[n_sets++].rtl = y;
4312	    }
4313	}
4314    }
4315
4316  return n_sets;
4317}
4318
4319/* Where possible, substitute every register reference in the N_SETS
4320   number of SETS in INSN with the the canonical register.
4321
4322   Register canonicalization propagatest the earliest register (i.e.
4323   one that is set before INSN) with the same value.  This is a very
4324   useful, simple form of CSE, to clean up warts from expanding GIMPLE
4325   to RTL.  For instance, a CONST for an address is usually expanded
4326   multiple times to loads into different registers, thus creating many
4327   subexpressions of the form:
4328
4329   (set (reg1) (some_const))
4330   (set (mem (... reg1 ...) (thing)))
4331   (set (reg2) (some_const))
4332   (set (mem (... reg2 ...) (thing)))
4333
4334   After canonicalizing, the code takes the following form:
4335
4336   (set (reg1) (some_const))
4337   (set (mem (... reg1 ...) (thing)))
4338   (set (reg2) (some_const))
4339   (set (mem (... reg1 ...) (thing)))
4340
4341   The set to reg2 is now trivially dead, and the memory reference (or
4342   address, or whatever) may be a candidate for further CSEing.
4343
4344   In this function, the result of apply_change_group can be ignored;
4345   see canon_reg.  */
4346
4347static void
4348canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
4349{
4350  struct set *sets = *psets;
4351  rtx tem;
4352  rtx x = PATTERN (insn);
4353  int i;
4354
4355  if (CALL_P (insn))
4356    {
4357      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4358	if (GET_CODE (XEXP (tem, 0)) != SET)
4359	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4360    }
4361
4362  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
4363    {
4364      canon_reg (SET_SRC (x), insn);
4365      apply_change_group ();
4366      fold_rtx (SET_SRC (x), insn);
4367    }
4368  else if (GET_CODE (x) == CLOBBER)
4369    {
4370      /* If we clobber memory, canon the address.
4371	 This does nothing when a register is clobbered
4372	 because we have already invalidated the reg.  */
4373      if (MEM_P (XEXP (x, 0)))
4374	canon_reg (XEXP (x, 0), insn);
4375    }
4376  else if (GET_CODE (x) == USE
4377	   && ! (REG_P (XEXP (x, 0))
4378		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4379    /* Canonicalize a USE of a pseudo register or memory location.  */
4380    canon_reg (x, insn);
4381  else if (GET_CODE (x) == ASM_OPERANDS)
4382    {
4383      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4384	{
4385	  rtx input = ASM_OPERANDS_INPUT (x, i);
4386	  if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
4387	    {
4388	      input = canon_reg (input, insn);
4389	      validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4390	    }
4391	}
4392    }
4393  else if (GET_CODE (x) == CALL)
4394    {
4395      canon_reg (x, insn);
4396      apply_change_group ();
4397      fold_rtx (x, insn);
4398    }
4399  else if (DEBUG_INSN_P (insn))
4400    canon_reg (PATTERN (insn), insn);
4401  else if (GET_CODE (x) == PARALLEL)
4402    {
4403      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4404	{
4405	  rtx y = XVECEXP (x, 0, i);
4406	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
4407	    {
4408	      canon_reg (SET_SRC (y), insn);
4409	      apply_change_group ();
4410	      fold_rtx (SET_SRC (y), insn);
4411	    }
4412	  else if (GET_CODE (y) == CLOBBER)
4413	    {
4414	      if (MEM_P (XEXP (y, 0)))
4415		canon_reg (XEXP (y, 0), insn);
4416	    }
4417	  else if (GET_CODE (y) == USE
4418		   && ! (REG_P (XEXP (y, 0))
4419			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4420	    canon_reg (y, insn);
4421	  else if (GET_CODE (y) == CALL)
4422	    {
4423	      canon_reg (y, insn);
4424	      apply_change_group ();
4425	      fold_rtx (y, insn);
4426	    }
4427	}
4428    }
4429
4430  if (n_sets == 1 && REG_NOTES (insn) != 0
4431      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4432    {
4433      /* We potentially will process this insn many times.  Therefore,
4434	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
4435	 unique set in INSN.
4436
4437	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4438	 because cse_insn handles those specially.  */
4439      if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
4440	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
4441	remove_note (insn, tem);
4442      else
4443	{
4444	  canon_reg (XEXP (tem, 0), insn);
4445	  apply_change_group ();
4446	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
4447	  df_notes_rescan (insn);
4448	}
4449    }
4450
4451  /* Canonicalize sources and addresses of destinations.
4452     We do this in a separate pass to avoid problems when a MATCH_DUP is
4453     present in the insn pattern.  In that case, we want to ensure that
4454     we don't break the duplicate nature of the pattern.  So we will replace
4455     both operands at the same time.  Otherwise, we would fail to find an
4456     equivalent substitution in the loop calling validate_change below.
4457
4458     We used to suppress canonicalization of DEST if it appears in SRC,
4459     but we don't do this any more.  */
4460
4461  for (i = 0; i < n_sets; i++)
4462    {
4463      rtx dest = SET_DEST (sets[i].rtl);
4464      rtx src = SET_SRC (sets[i].rtl);
4465      rtx new_rtx = canon_reg (src, insn);
4466
4467      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4468
4469      if (GET_CODE (dest) == ZERO_EXTRACT)
4470	{
4471	  validate_change (insn, &XEXP (dest, 1),
4472			   canon_reg (XEXP (dest, 1), insn), 1);
4473	  validate_change (insn, &XEXP (dest, 2),
4474			   canon_reg (XEXP (dest, 2), insn), 1);
4475	}
4476
4477      while (GET_CODE (dest) == SUBREG
4478	     || GET_CODE (dest) == ZERO_EXTRACT
4479	     || GET_CODE (dest) == STRICT_LOW_PART)
4480	dest = XEXP (dest, 0);
4481
4482      if (MEM_P (dest))
4483	canon_reg (dest, insn);
4484    }
4485
4486  /* Now that we have done all the replacements, we can apply the change
4487     group and see if they all work.  Note that this will cause some
4488     canonicalizations that would have worked individually not to be applied
4489     because some other canonicalization didn't work, but this should not
4490     occur often.
4491
4492     The result of apply_change_group can be ignored; see canon_reg.  */
4493
4494  apply_change_group ();
4495}
4496
4497/* Main function of CSE.
4498   First simplify sources and addresses of all assignments
4499   in the instruction, using previously-computed equivalents values.
4500   Then install the new sources and destinations in the table
4501   of available values.  */
4502
4503static void
4504cse_insn (rtx_insn *insn)
4505{
4506  rtx x = PATTERN (insn);
4507  int i;
4508  rtx tem;
4509  int n_sets = 0;
4510
4511  rtx src_eqv = 0;
4512  struct table_elt *src_eqv_elt = 0;
4513  int src_eqv_volatile = 0;
4514  int src_eqv_in_memory = 0;
4515  unsigned src_eqv_hash = 0;
4516
4517  struct set *sets = (struct set *) 0;
4518
4519  if (GET_CODE (x) == SET)
4520    sets = XALLOCA (struct set);
4521  else if (GET_CODE (x) == PARALLEL)
4522    sets = XALLOCAVEC (struct set, XVECLEN (x, 0));
4523
4524  this_insn = insn;
4525#ifdef HAVE_cc0
4526  /* Records what this insn does to set CC0.  */
4527  this_insn_cc0 = 0;
4528  this_insn_cc0_mode = VOIDmode;
4529#endif
4530
4531  /* Find all regs explicitly clobbered in this insn,
4532     to ensure they are not replaced with any other regs
4533     elsewhere in this insn.  */
4534  invalidate_from_sets_and_clobbers (insn);
4535
4536  /* Record all the SETs in this instruction.  */
4537  n_sets = find_sets_in_insn (insn, &sets);
4538
4539  /* Substitute the canonical register where possible.  */
4540  canonicalize_insn (insn, &sets, n_sets);
4541
4542  /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4543     if different, or if the DEST is a STRICT_LOW_PART.  The latter condition
4544     is necessary because SRC_EQV is handled specially for this case, and if
4545     it isn't set, then there will be no equivalence for the destination.  */
4546  if (n_sets == 1 && REG_NOTES (insn) != 0
4547      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4548      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4549	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4550    src_eqv = copy_rtx (XEXP (tem, 0));
4551
4552  /* Set sets[i].src_elt to the class each source belongs to.
4553     Detect assignments from or to volatile things
4554     and set set[i] to zero so they will be ignored
4555     in the rest of this function.
4556
4557     Nothing in this loop changes the hash table or the register chains.  */
4558
4559  for (i = 0; i < n_sets; i++)
4560    {
4561      bool repeat = false;
4562      rtx src, dest;
4563      rtx src_folded;
4564      struct table_elt *elt = 0, *p;
4565      machine_mode mode;
4566      rtx src_eqv_here;
4567      rtx src_const = 0;
4568      rtx src_related = 0;
4569      bool src_related_is_const_anchor = false;
4570      struct table_elt *src_const_elt = 0;
4571      int src_cost = MAX_COST;
4572      int src_eqv_cost = MAX_COST;
4573      int src_folded_cost = MAX_COST;
4574      int src_related_cost = MAX_COST;
4575      int src_elt_cost = MAX_COST;
4576      int src_regcost = MAX_COST;
4577      int src_eqv_regcost = MAX_COST;
4578      int src_folded_regcost = MAX_COST;
4579      int src_related_regcost = MAX_COST;
4580      int src_elt_regcost = MAX_COST;
4581      /* Set nonzero if we need to call force_const_mem on with the
4582	 contents of src_folded before using it.  */
4583      int src_folded_force_flag = 0;
4584
4585      dest = SET_DEST (sets[i].rtl);
4586      src = SET_SRC (sets[i].rtl);
4587
4588      /* If SRC is a constant that has no machine mode,
4589	 hash it with the destination's machine mode.
4590	 This way we can keep different modes separate.  */
4591
4592      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4593      sets[i].mode = mode;
4594
4595      if (src_eqv)
4596	{
4597	  machine_mode eqvmode = mode;
4598	  if (GET_CODE (dest) == STRICT_LOW_PART)
4599	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4600	  do_not_record = 0;
4601	  hash_arg_in_memory = 0;
4602	  src_eqv_hash = HASH (src_eqv, eqvmode);
4603
4604	  /* Find the equivalence class for the equivalent expression.  */
4605
4606	  if (!do_not_record)
4607	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4608
4609	  src_eqv_volatile = do_not_record;
4610	  src_eqv_in_memory = hash_arg_in_memory;
4611	}
4612
4613      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4614	 value of the INNER register, not the destination.  So it is not
4615	 a valid substitution for the source.  But save it for later.  */
4616      if (GET_CODE (dest) == STRICT_LOW_PART)
4617	src_eqv_here = 0;
4618      else
4619	src_eqv_here = src_eqv;
4620
4621      /* Simplify and foldable subexpressions in SRC.  Then get the fully-
4622	 simplified result, which may not necessarily be valid.  */
4623      src_folded = fold_rtx (src, insn);
4624
4625#if 0
4626      /* ??? This caused bad code to be generated for the m68k port with -O2.
4627	 Suppose src is (CONST_INT -1), and that after truncation src_folded
4628	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
4629	 At the end we will add src and src_const to the same equivalence
4630	 class.  We now have 3 and -1 on the same equivalence class.  This
4631	 causes later instructions to be mis-optimized.  */
4632      /* If storing a constant in a bitfield, pre-truncate the constant
4633	 so we will be able to record it later.  */
4634      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4635	{
4636	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4637
4638	  if (CONST_INT_P (src)
4639	      && CONST_INT_P (width)
4640	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4641	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4642	    src_folded
4643	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4644					  << INTVAL (width)) - 1));
4645	}
4646#endif
4647
4648      /* Compute SRC's hash code, and also notice if it
4649	 should not be recorded at all.  In that case,
4650	 prevent any further processing of this assignment.  */
4651      do_not_record = 0;
4652      hash_arg_in_memory = 0;
4653
4654      sets[i].src = src;
4655      sets[i].src_hash = HASH (src, mode);
4656      sets[i].src_volatile = do_not_record;
4657      sets[i].src_in_memory = hash_arg_in_memory;
4658
4659      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4660	 a pseudo, do not record SRC.  Using SRC as a replacement for
4661	 anything else will be incorrect in that situation.  Note that
4662	 this usually occurs only for stack slots, in which case all the
4663	 RTL would be referring to SRC, so we don't lose any optimization
4664	 opportunities by not having SRC in the hash table.  */
4665
4666      if (MEM_P (src)
4667	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4668	  && REG_P (dest)
4669	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4670	sets[i].src_volatile = 1;
4671
4672      else if (GET_CODE (src) == ASM_OPERANDS
4673	       && GET_CODE (x) == PARALLEL)
4674	{
4675	  /* Do not record result of a non-volatile inline asm with
4676	     more than one result.  */
4677	  if (n_sets > 1)
4678	    sets[i].src_volatile = 1;
4679
4680	  int j, lim = XVECLEN (x, 0);
4681	  for (j = 0; j < lim; j++)
4682	    {
4683	      rtx y = XVECEXP (x, 0, j);
4684	      /* And do not record result of a non-volatile inline asm
4685		 with "memory" clobber.  */
4686	      if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
4687		{
4688		  sets[i].src_volatile = 1;
4689		  break;
4690		}
4691	    }
4692	}
4693
4694#if 0
4695      /* It is no longer clear why we used to do this, but it doesn't
4696	 appear to still be needed.  So let's try without it since this
4697	 code hurts cse'ing widened ops.  */
4698      /* If source is a paradoxical subreg (such as QI treated as an SI),
4699	 treat it as volatile.  It may do the work of an SI in one context
4700	 where the extra bits are not being used, but cannot replace an SI
4701	 in general.  */
4702      if (paradoxical_subreg_p (src))
4703	sets[i].src_volatile = 1;
4704#endif
4705
4706      /* Locate all possible equivalent forms for SRC.  Try to replace
4707         SRC in the insn with each cheaper equivalent.
4708
4709         We have the following types of equivalents: SRC itself, a folded
4710         version, a value given in a REG_EQUAL note, or a value related
4711	 to a constant.
4712
4713         Each of these equivalents may be part of an additional class
4714         of equivalents (if more than one is in the table, they must be in
4715         the same class; we check for this).
4716
4717	 If the source is volatile, we don't do any table lookups.
4718
4719         We note any constant equivalent for possible later use in a
4720         REG_NOTE.  */
4721
4722      if (!sets[i].src_volatile)
4723	elt = lookup (src, sets[i].src_hash, mode);
4724
4725      sets[i].src_elt = elt;
4726
4727      if (elt && src_eqv_here && src_eqv_elt)
4728	{
4729	  if (elt->first_same_value != src_eqv_elt->first_same_value)
4730	    {
4731	      /* The REG_EQUAL is indicating that two formerly distinct
4732		 classes are now equivalent.  So merge them.  */
4733	      merge_equiv_classes (elt, src_eqv_elt);
4734	      src_eqv_hash = HASH (src_eqv, elt->mode);
4735	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4736	    }
4737
4738	  src_eqv_here = 0;
4739	}
4740
4741      else if (src_eqv_elt)
4742	elt = src_eqv_elt;
4743
4744      /* Try to find a constant somewhere and record it in `src_const'.
4745	 Record its table element, if any, in `src_const_elt'.  Look in
4746	 any known equivalences first.  (If the constant is not in the
4747	 table, also set `sets[i].src_const_hash').  */
4748      if (elt)
4749	for (p = elt->first_same_value; p; p = p->next_same_value)
4750	  if (p->is_const)
4751	    {
4752	      src_const = p->exp;
4753	      src_const_elt = elt;
4754	      break;
4755	    }
4756
4757      if (src_const == 0
4758	  && (CONSTANT_P (src_folded)
4759	      /* Consider (minus (label_ref L1) (label_ref L2)) as
4760		 "constant" here so we will record it. This allows us
4761		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
4762	      || (GET_CODE (src_folded) == MINUS
4763		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4764		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4765	src_const = src_folded, src_const_elt = elt;
4766      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4767	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4768
4769      /* If we don't know if the constant is in the table, get its
4770	 hash code and look it up.  */
4771      if (src_const && src_const_elt == 0)
4772	{
4773	  sets[i].src_const_hash = HASH (src_const, mode);
4774	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4775	}
4776
4777      sets[i].src_const = src_const;
4778      sets[i].src_const_elt = src_const_elt;
4779
4780      /* If the constant and our source are both in the table, mark them as
4781	 equivalent.  Otherwise, if a constant is in the table but the source
4782	 isn't, set ELT to it.  */
4783      if (src_const_elt && elt
4784	  && src_const_elt->first_same_value != elt->first_same_value)
4785	merge_equiv_classes (elt, src_const_elt);
4786      else if (src_const_elt && elt == 0)
4787	elt = src_const_elt;
4788
4789      /* See if there is a register linearly related to a constant
4790         equivalent of SRC.  */
4791      if (src_const
4792	  && (GET_CODE (src_const) == CONST
4793	      || (src_const_elt && src_const_elt->related_value != 0)))
4794	{
4795	  src_related = use_related_value (src_const, src_const_elt);
4796	  if (src_related)
4797	    {
4798	      struct table_elt *src_related_elt
4799		= lookup (src_related, HASH (src_related, mode), mode);
4800	      if (src_related_elt && elt)
4801		{
4802		  if (elt->first_same_value
4803		      != src_related_elt->first_same_value)
4804		    /* This can occur when we previously saw a CONST
4805		       involving a SYMBOL_REF and then see the SYMBOL_REF
4806		       twice.  Merge the involved classes.  */
4807		    merge_equiv_classes (elt, src_related_elt);
4808
4809		  src_related = 0;
4810		  src_related_elt = 0;
4811		}
4812	      else if (src_related_elt && elt == 0)
4813		elt = src_related_elt;
4814	    }
4815	}
4816
4817      /* See if we have a CONST_INT that is already in a register in a
4818	 wider mode.  */
4819
4820      if (src_const && src_related == 0 && CONST_INT_P (src_const)
4821	  && GET_MODE_CLASS (mode) == MODE_INT
4822	  && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
4823	{
4824	  machine_mode wider_mode;
4825
4826	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
4827	       wider_mode != VOIDmode
4828	       && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
4829	       && src_related == 0;
4830	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4831	    {
4832	      struct table_elt *const_elt
4833		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4834
4835	      if (const_elt == 0)
4836		continue;
4837
4838	      for (const_elt = const_elt->first_same_value;
4839		   const_elt; const_elt = const_elt->next_same_value)
4840		if (REG_P (const_elt->exp))
4841		  {
4842		    src_related = gen_lowpart (mode, const_elt->exp);
4843		    break;
4844		  }
4845	    }
4846	}
4847
4848      /* Another possibility is that we have an AND with a constant in
4849	 a mode narrower than a word.  If so, it might have been generated
4850	 as part of an "if" which would narrow the AND.  If we already
4851	 have done the AND in a wider mode, we can use a SUBREG of that
4852	 value.  */
4853
4854      if (flag_expensive_optimizations && ! src_related
4855	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4856	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4857	{
4858	  machine_mode tmode;
4859	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4860
4861	  for (tmode = GET_MODE_WIDER_MODE (mode);
4862	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4863	       tmode = GET_MODE_WIDER_MODE (tmode))
4864	    {
4865	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4866	      struct table_elt *larger_elt;
4867
4868	      if (inner)
4869		{
4870		  PUT_MODE (new_and, tmode);
4871		  XEXP (new_and, 0) = inner;
4872		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4873		  if (larger_elt == 0)
4874		    continue;
4875
4876		  for (larger_elt = larger_elt->first_same_value;
4877		       larger_elt; larger_elt = larger_elt->next_same_value)
4878		    if (REG_P (larger_elt->exp))
4879		      {
4880			src_related
4881			  = gen_lowpart (mode, larger_elt->exp);
4882			break;
4883		      }
4884
4885		  if (src_related)
4886		    break;
4887		}
4888	    }
4889	}
4890
4891#ifdef LOAD_EXTEND_OP
4892      /* See if a MEM has already been loaded with a widening operation;
4893	 if it has, we can use a subreg of that.  Many CISC machines
4894	 also have such operations, but this is only likely to be
4895	 beneficial on these machines.  */
4896
4897      if (flag_expensive_optimizations && src_related == 0
4898	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4899	  && GET_MODE_CLASS (mode) == MODE_INT
4900	  && MEM_P (src) && ! do_not_record
4901	  && LOAD_EXTEND_OP (mode) != UNKNOWN)
4902	{
4903	  struct rtx_def memory_extend_buf;
4904	  rtx memory_extend_rtx = &memory_extend_buf;
4905	  machine_mode tmode;
4906
4907	  /* Set what we are trying to extend and the operation it might
4908	     have been extended with.  */
4909	  memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
4910	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4911	  XEXP (memory_extend_rtx, 0) = src;
4912
4913	  for (tmode = GET_MODE_WIDER_MODE (mode);
4914	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4915	       tmode = GET_MODE_WIDER_MODE (tmode))
4916	    {
4917	      struct table_elt *larger_elt;
4918
4919	      PUT_MODE (memory_extend_rtx, tmode);
4920	      larger_elt = lookup (memory_extend_rtx,
4921				   HASH (memory_extend_rtx, tmode), tmode);
4922	      if (larger_elt == 0)
4923		continue;
4924
4925	      for (larger_elt = larger_elt->first_same_value;
4926		   larger_elt; larger_elt = larger_elt->next_same_value)
4927		if (REG_P (larger_elt->exp))
4928		  {
4929		    src_related = gen_lowpart (mode, larger_elt->exp);
4930		    break;
4931		  }
4932
4933	      if (src_related)
4934		break;
4935	    }
4936	}
4937#endif /* LOAD_EXTEND_OP */
4938
4939      /* Try to express the constant using a register+offset expression
4940	 derived from a constant anchor.  */
4941
4942      if (targetm.const_anchor
4943	  && !src_related
4944	  && src_const
4945	  && GET_CODE (src_const) == CONST_INT)
4946	{
4947	  src_related = try_const_anchors (src_const, mode);
4948	  src_related_is_const_anchor = src_related != NULL_RTX;
4949	}
4950
4951
4952      if (src == src_folded)
4953	src_folded = 0;
4954
4955      /* At this point, ELT, if nonzero, points to a class of expressions
4956         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4957	 and SRC_RELATED, if nonzero, each contain additional equivalent
4958	 expressions.  Prune these latter expressions by deleting expressions
4959	 already in the equivalence class.
4960
4961	 Check for an equivalent identical to the destination.  If found,
4962	 this is the preferred equivalent since it will likely lead to
4963	 elimination of the insn.  Indicate this by placing it in
4964	 `src_related'.  */
4965
4966      if (elt)
4967	elt = elt->first_same_value;
4968      for (p = elt; p; p = p->next_same_value)
4969	{
4970	  enum rtx_code code = GET_CODE (p->exp);
4971
4972	  /* If the expression is not valid, ignore it.  Then we do not
4973	     have to check for validity below.  In most cases, we can use
4974	     `rtx_equal_p', since canonicalization has already been done.  */
4975	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4976	    continue;
4977
4978	  /* Also skip paradoxical subregs, unless that's what we're
4979	     looking for.  */
4980	  if (paradoxical_subreg_p (p->exp)
4981	      && ! (src != 0
4982		    && GET_CODE (src) == SUBREG
4983		    && GET_MODE (src) == GET_MODE (p->exp)
4984		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4985			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4986	    continue;
4987
4988	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4989	    src = 0;
4990	  else if (src_folded && GET_CODE (src_folded) == code
4991		   && rtx_equal_p (src_folded, p->exp))
4992	    src_folded = 0;
4993	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4994		   && rtx_equal_p (src_eqv_here, p->exp))
4995	    src_eqv_here = 0;
4996	  else if (src_related && GET_CODE (src_related) == code
4997		   && rtx_equal_p (src_related, p->exp))
4998	    src_related = 0;
4999
5000	  /* This is the same as the destination of the insns, we want
5001	     to prefer it.  Copy it to src_related.  The code below will
5002	     then give it a negative cost.  */
5003	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5004	    src_related = dest;
5005	}
5006
5007      /* Find the cheapest valid equivalent, trying all the available
5008         possibilities.  Prefer items not in the hash table to ones
5009         that are when they are equal cost.  Note that we can never
5010         worsen an insn as the current contents will also succeed.
5011	 If we find an equivalent identical to the destination, use it as best,
5012	 since this insn will probably be eliminated in that case.  */
5013      if (src)
5014	{
5015	  if (rtx_equal_p (src, dest))
5016	    src_cost = src_regcost = -1;
5017	  else
5018	    {
5019	      src_cost = COST (src);
5020	      src_regcost = approx_reg_cost (src);
5021	    }
5022	}
5023
5024      if (src_eqv_here)
5025	{
5026	  if (rtx_equal_p (src_eqv_here, dest))
5027	    src_eqv_cost = src_eqv_regcost = -1;
5028	  else
5029	    {
5030	      src_eqv_cost = COST (src_eqv_here);
5031	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5032	    }
5033	}
5034
5035      if (src_folded)
5036	{
5037	  if (rtx_equal_p (src_folded, dest))
5038	    src_folded_cost = src_folded_regcost = -1;
5039	  else
5040	    {
5041	      src_folded_cost = COST (src_folded);
5042	      src_folded_regcost = approx_reg_cost (src_folded);
5043	    }
5044	}
5045
5046      if (src_related)
5047	{
5048	  if (rtx_equal_p (src_related, dest))
5049	    src_related_cost = src_related_regcost = -1;
5050	  else
5051	    {
5052	      src_related_cost = COST (src_related);
5053	      src_related_regcost = approx_reg_cost (src_related);
5054
5055	      /* If a const-anchor is used to synthesize a constant that
5056		 normally requires multiple instructions then slightly prefer
5057		 it over the original sequence.  These instructions are likely
5058		 to become redundant now.  We can't compare against the cost
5059		 of src_eqv_here because, on MIPS for example, multi-insn
5060		 constants have zero cost; they are assumed to be hoisted from
5061		 loops.  */
5062	      if (src_related_is_const_anchor
5063		  && src_related_cost == src_cost
5064		  && src_eqv_here)
5065		src_related_cost--;
5066	    }
5067	}
5068
5069      /* If this was an indirect jump insn, a known label will really be
5070	 cheaper even though it looks more expensive.  */
5071      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5072	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5073
5074      /* Terminate loop when replacement made.  This must terminate since
5075         the current contents will be tested and will always be valid.  */
5076      while (1)
5077	{
5078	  rtx trial;
5079
5080	  /* Skip invalid entries.  */
5081	  while (elt && !REG_P (elt->exp)
5082		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5083	    elt = elt->next_same_value;
5084
5085	  /* A paradoxical subreg would be bad here: it'll be the right
5086	     size, but later may be adjusted so that the upper bits aren't
5087	     what we want.  So reject it.  */
5088	  if (elt != 0
5089	      && paradoxical_subreg_p (elt->exp)
5090	      /* It is okay, though, if the rtx we're trying to match
5091		 will ignore any of the bits we can't predict.  */
5092	      && ! (src != 0
5093		    && GET_CODE (src) == SUBREG
5094		    && GET_MODE (src) == GET_MODE (elt->exp)
5095		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5096			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5097	    {
5098	      elt = elt->next_same_value;
5099	      continue;
5100	    }
5101
5102	  if (elt)
5103	    {
5104	      src_elt_cost = elt->cost;
5105	      src_elt_regcost = elt->regcost;
5106	    }
5107
5108	  /* Find cheapest and skip it for the next time.   For items
5109	     of equal cost, use this order:
5110	     src_folded, src, src_eqv, src_related and hash table entry.  */
5111	  if (src_folded
5112	      && preferable (src_folded_cost, src_folded_regcost,
5113			     src_cost, src_regcost) <= 0
5114	      && preferable (src_folded_cost, src_folded_regcost,
5115			     src_eqv_cost, src_eqv_regcost) <= 0
5116	      && preferable (src_folded_cost, src_folded_regcost,
5117			     src_related_cost, src_related_regcost) <= 0
5118	      && preferable (src_folded_cost, src_folded_regcost,
5119			     src_elt_cost, src_elt_regcost) <= 0)
5120	    {
5121	      trial = src_folded, src_folded_cost = MAX_COST;
5122	      if (src_folded_force_flag)
5123		{
5124		  rtx forced = force_const_mem (mode, trial);
5125		  if (forced)
5126		    trial = forced;
5127		}
5128	    }
5129	  else if (src
5130		   && preferable (src_cost, src_regcost,
5131				  src_eqv_cost, src_eqv_regcost) <= 0
5132		   && preferable (src_cost, src_regcost,
5133				  src_related_cost, src_related_regcost) <= 0
5134		   && preferable (src_cost, src_regcost,
5135				  src_elt_cost, src_elt_regcost) <= 0)
5136	    trial = src, src_cost = MAX_COST;
5137	  else if (src_eqv_here
5138		   && preferable (src_eqv_cost, src_eqv_regcost,
5139				  src_related_cost, src_related_regcost) <= 0
5140		   && preferable (src_eqv_cost, src_eqv_regcost,
5141				  src_elt_cost, src_elt_regcost) <= 0)
5142	    trial = src_eqv_here, src_eqv_cost = MAX_COST;
5143	  else if (src_related
5144		   && preferable (src_related_cost, src_related_regcost,
5145				  src_elt_cost, src_elt_regcost) <= 0)
5146	    trial = src_related, src_related_cost = MAX_COST;
5147	  else
5148	    {
5149	      trial = elt->exp;
5150	      elt = elt->next_same_value;
5151	      src_elt_cost = MAX_COST;
5152	    }
5153
5154	  /* Avoid creation of overlapping memory moves.  */
5155	  if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
5156	    {
5157	      rtx src, dest;
5158
5159	      /* BLKmode moves are not handled by cse anyway.  */
5160	      if (GET_MODE (trial) == BLKmode)
5161		break;
5162
5163	      src = canon_rtx (trial);
5164	      dest = canon_rtx (SET_DEST (sets[i].rtl));
5165
5166	      if (!MEM_P (src) || !MEM_P (dest)
5167		  || !nonoverlapping_memrefs_p (src, dest, false))
5168		break;
5169	    }
5170
5171	  /* Try to optimize
5172	     (set (reg:M N) (const_int A))
5173	     (set (reg:M2 O) (const_int B))
5174	     (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5175		  (reg:M2 O)).  */
5176	  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5177	      && CONST_INT_P (trial)
5178	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5179	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5180	      && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5181	      && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
5182		  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5183	      && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5184		  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5185		  <= HOST_BITS_PER_WIDE_INT))
5186	    {
5187	      rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5188	      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5189	      rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5190	      unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5191	      struct table_elt *dest_elt
5192		= lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5193	      rtx dest_cst = NULL;
5194
5195	      if (dest_elt)
5196		for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5197		  if (p->is_const && CONST_INT_P (p->exp))
5198		    {
5199		      dest_cst = p->exp;
5200		      break;
5201		    }
5202	      if (dest_cst)
5203		{
5204		  HOST_WIDE_INT val = INTVAL (dest_cst);
5205		  HOST_WIDE_INT mask;
5206		  unsigned int shift;
5207		  if (BITS_BIG_ENDIAN)
5208		    shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
5209			    - INTVAL (pos) - INTVAL (width);
5210		  else
5211		    shift = INTVAL (pos);
5212		  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5213		    mask = ~(HOST_WIDE_INT) 0;
5214		  else
5215		    mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5216		  val &= ~(mask << shift);
5217		  val |= (INTVAL (trial) & mask) << shift;
5218		  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5219		  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5220					   dest_reg, 1);
5221		  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5222					   GEN_INT (val), 1);
5223		  if (apply_change_group ())
5224		    {
5225		      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5226		      if (note)
5227			{
5228			  remove_note (insn, note);
5229			  df_notes_rescan (insn);
5230			}
5231		      src_eqv = NULL_RTX;
5232		      src_eqv_elt = NULL;
5233		      src_eqv_volatile = 0;
5234		      src_eqv_in_memory = 0;
5235		      src_eqv_hash = 0;
5236		      repeat = true;
5237		      break;
5238		    }
5239		}
5240	    }
5241
5242	  /* We don't normally have an insn matching (set (pc) (pc)), so
5243	     check for this separately here.  We will delete such an
5244	     insn below.
5245
5246	     For other cases such as a table jump or conditional jump
5247	     where we know the ultimate target, go ahead and replace the
5248	     operand.  While that may not make a valid insn, we will
5249	     reemit the jump below (and also insert any necessary
5250	     barriers).  */
5251	  if (n_sets == 1 && dest == pc_rtx
5252	      && (trial == pc_rtx
5253		  || (GET_CODE (trial) == LABEL_REF
5254		      && ! condjump_p (insn))))
5255	    {
5256	      /* Don't substitute non-local labels, this confuses CFG.  */
5257	      if (GET_CODE (trial) == LABEL_REF
5258		  && LABEL_REF_NONLOCAL_P (trial))
5259		continue;
5260
5261	      SET_SRC (sets[i].rtl) = trial;
5262	      cse_jumps_altered = true;
5263	      break;
5264	    }
5265
5266	  /* Reject certain invalid forms of CONST that we create.  */
5267	  else if (CONSTANT_P (trial)
5268		   && GET_CODE (trial) == CONST
5269		   /* Reject cases that will cause decode_rtx_const to
5270		      die.  On the alpha when simplifying a switch, we
5271		      get (const (truncate (minus (label_ref)
5272		      (label_ref)))).  */
5273		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5274		       /* Likewise on IA-64, except without the
5275			  truncate.  */
5276		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5277			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5278			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5279	    /* Do nothing for this case.  */
5280	    ;
5281
5282	  /* Look for a substitution that makes a valid insn.  */
5283	  else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5284					    trial, 0))
5285	    {
5286	      rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5287
5288	      /* The result of apply_change_group can be ignored; see
5289		 canon_reg.  */
5290
5291	      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5292	      apply_change_group ();
5293
5294	      break;
5295	    }
5296
5297	  /* If we previously found constant pool entries for
5298	     constants and this is a constant, try making a
5299	     pool entry.  Put it in src_folded unless we already have done
5300	     this since that is where it likely came from.  */
5301
5302	  else if (constant_pool_entries_cost
5303		   && CONSTANT_P (trial)
5304		   && (src_folded == 0
5305		       || (!MEM_P (src_folded)
5306			   && ! src_folded_force_flag))
5307		   && GET_MODE_CLASS (mode) != MODE_CC
5308		   && mode != VOIDmode)
5309	    {
5310	      src_folded_force_flag = 1;
5311	      src_folded = trial;
5312	      src_folded_cost = constant_pool_entries_cost;
5313	      src_folded_regcost = constant_pool_entries_regcost;
5314	    }
5315	}
5316
5317      /* If we changed the insn too much, handle this set from scratch.  */
5318      if (repeat)
5319	{
5320	  i--;
5321	  continue;
5322	}
5323
5324      src = SET_SRC (sets[i].rtl);
5325
5326      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5327	 However, there is an important exception:  If both are registers
5328	 that are not the head of their equivalence class, replace SET_SRC
5329	 with the head of the class.  If we do not do this, we will have
5330	 both registers live over a portion of the basic block.  This way,
5331	 their lifetimes will likely abut instead of overlapping.  */
5332      if (REG_P (dest)
5333	  && REGNO_QTY_VALID_P (REGNO (dest)))
5334	{
5335	  int dest_q = REG_QTY (REGNO (dest));
5336	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5337
5338	  if (dest_ent->mode == GET_MODE (dest)
5339	      && dest_ent->first_reg != REGNO (dest)
5340	      && REG_P (src) && REGNO (src) == REGNO (dest)
5341	      /* Don't do this if the original insn had a hard reg as
5342		 SET_SRC or SET_DEST.  */
5343	      && (!REG_P (sets[i].src)
5344		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5345	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5346	    /* We can't call canon_reg here because it won't do anything if
5347	       SRC is a hard register.  */
5348	    {
5349	      int src_q = REG_QTY (REGNO (src));
5350	      struct qty_table_elem *src_ent = &qty_table[src_q];
5351	      int first = src_ent->first_reg;
5352	      rtx new_src
5353		= (first >= FIRST_PSEUDO_REGISTER
5354		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5355
5356	      /* We must use validate-change even for this, because this
5357		 might be a special no-op instruction, suitable only to
5358		 tag notes onto.  */
5359	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5360		{
5361		  src = new_src;
5362		  /* If we had a constant that is cheaper than what we are now
5363		     setting SRC to, use that constant.  We ignored it when we
5364		     thought we could make this into a no-op.  */
5365		  if (src_const && COST (src_const) < COST (src)
5366		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5367					  src_const, 0))
5368		    src = src_const;
5369		}
5370	    }
5371	}
5372
5373      /* If we made a change, recompute SRC values.  */
5374      if (src != sets[i].src)
5375	{
5376	  do_not_record = 0;
5377	  hash_arg_in_memory = 0;
5378	  sets[i].src = src;
5379	  sets[i].src_hash = HASH (src, mode);
5380	  sets[i].src_volatile = do_not_record;
5381	  sets[i].src_in_memory = hash_arg_in_memory;
5382	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5383	}
5384
5385      /* If this is a single SET, we are setting a register, and we have an
5386	 equivalent constant, we want to add a REG_EQUAL note if the constant
5387	 is different from the source.  We don't want to do it for a constant
5388	 pseudo since verifying that this pseudo hasn't been eliminated is a
5389	 pain; moreover such a note won't help anything.
5390
5391	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5392	 which can be created for a reference to a compile time computable
5393	 entry in a jump table.  */
5394      if (n_sets == 1
5395	  && REG_P (dest)
5396	  && src_const
5397	  && !REG_P (src_const)
5398	  && !(GET_CODE (src_const) == SUBREG
5399	       && REG_P (SUBREG_REG (src_const)))
5400	  && !(GET_CODE (src_const) == CONST
5401	       && GET_CODE (XEXP (src_const, 0)) == MINUS
5402	       && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5403	       && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5404	  && !rtx_equal_p (src, src_const))
5405	{
5406	  /* Make sure that the rtx is not shared.  */
5407	  src_const = copy_rtx (src_const);
5408
5409	  /* Record the actual constant value in a REG_EQUAL note,
5410	     making a new one if one does not already exist.  */
5411	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5412	  df_notes_rescan (insn);
5413	}
5414
5415      /* Now deal with the destination.  */
5416      do_not_record = 0;
5417
5418      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5419      while (GET_CODE (dest) == SUBREG
5420	     || GET_CODE (dest) == ZERO_EXTRACT
5421	     || GET_CODE (dest) == STRICT_LOW_PART)
5422	dest = XEXP (dest, 0);
5423
5424      sets[i].inner_dest = dest;
5425
5426      if (MEM_P (dest))
5427	{
5428#ifdef PUSH_ROUNDING
5429	  /* Stack pushes invalidate the stack pointer.  */
5430	  rtx addr = XEXP (dest, 0);
5431	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5432	      && XEXP (addr, 0) == stack_pointer_rtx)
5433	    invalidate (stack_pointer_rtx, VOIDmode);
5434#endif
5435	  dest = fold_rtx (dest, insn);
5436	}
5437
5438      /* Compute the hash code of the destination now,
5439	 before the effects of this instruction are recorded,
5440	 since the register values used in the address computation
5441	 are those before this instruction.  */
5442      sets[i].dest_hash = HASH (dest, mode);
5443
5444      /* Don't enter a bit-field in the hash table
5445	 because the value in it after the store
5446	 may not equal what was stored, due to truncation.  */
5447
5448      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5449	{
5450	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5451
5452	  if (src_const != 0 && CONST_INT_P (src_const)
5453	      && CONST_INT_P (width)
5454	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5455	      && ! (INTVAL (src_const)
5456		    & (HOST_WIDE_INT_M1U << INTVAL (width))))
5457	    /* Exception: if the value is constant,
5458	       and it won't be truncated, record it.  */
5459	    ;
5460	  else
5461	    {
5462	      /* This is chosen so that the destination will be invalidated
5463		 but no new value will be recorded.
5464		 We must invalidate because sometimes constant
5465		 values can be recorded for bitfields.  */
5466	      sets[i].src_elt = 0;
5467	      sets[i].src_volatile = 1;
5468	      src_eqv = 0;
5469	      src_eqv_elt = 0;
5470	    }
5471	}
5472
5473      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5474	 the insn.  */
5475      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5476	{
5477	  /* One less use of the label this insn used to jump to.  */
5478	  delete_insn_and_edges (insn);
5479	  cse_jumps_altered = true;
5480	  /* No more processing for this set.  */
5481	  sets[i].rtl = 0;
5482	}
5483
5484      /* If this SET is now setting PC to a label, we know it used to
5485	 be a conditional or computed branch.  */
5486      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5487	       && !LABEL_REF_NONLOCAL_P (src))
5488	{
5489	  /* We reemit the jump in as many cases as possible just in
5490	     case the form of an unconditional jump is significantly
5491	     different than a computed jump or conditional jump.
5492
5493	     If this insn has multiple sets, then reemitting the
5494	     jump is nontrivial.  So instead we just force rerecognition
5495	     and hope for the best.  */
5496	  if (n_sets == 1)
5497	    {
5498	      rtx_insn *new_rtx;
5499	      rtx note;
5500
5501	      new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5502	      JUMP_LABEL (new_rtx) = XEXP (src, 0);
5503	      LABEL_NUSES (XEXP (src, 0))++;
5504
5505	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5506	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5507	      if (note)
5508		{
5509		  XEXP (note, 1) = NULL_RTX;
5510		  REG_NOTES (new_rtx) = note;
5511		}
5512
5513	      delete_insn_and_edges (insn);
5514	      insn = new_rtx;
5515	    }
5516	  else
5517	    INSN_CODE (insn) = -1;
5518
5519	  /* Do not bother deleting any unreachable code, let jump do it.  */
5520	  cse_jumps_altered = true;
5521	  sets[i].rtl = 0;
5522	}
5523
5524      /* If destination is volatile, invalidate it and then do no further
5525	 processing for this assignment.  */
5526
5527      else if (do_not_record)
5528	{
5529	  invalidate_dest (dest);
5530	  sets[i].rtl = 0;
5531	}
5532
5533      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5534	{
5535	  do_not_record = 0;
5536	  sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5537	  if (do_not_record)
5538	    {
5539	      invalidate_dest (SET_DEST (sets[i].rtl));
5540	      sets[i].rtl = 0;
5541	    }
5542	}
5543
5544#ifdef HAVE_cc0
5545      /* If setting CC0, record what it was set to, or a constant, if it
5546	 is equivalent to a constant.  If it is being set to a floating-point
5547	 value, make a COMPARE with the appropriate constant of 0.  If we
5548	 don't do this, later code can interpret this as a test against
5549	 const0_rtx, which can cause problems if we try to put it into an
5550	 insn as a floating-point operand.  */
5551      if (dest == cc0_rtx)
5552	{
5553	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5554	  this_insn_cc0_mode = mode;
5555	  if (FLOAT_MODE_P (mode))
5556	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5557					     CONST0_RTX (mode));
5558	}
5559#endif
5560    }
5561
5562  /* Now enter all non-volatile source expressions in the hash table
5563     if they are not already present.
5564     Record their equivalence classes in src_elt.
5565     This way we can insert the corresponding destinations into
5566     the same classes even if the actual sources are no longer in them
5567     (having been invalidated).  */
5568
5569  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5570      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5571    {
5572      struct table_elt *elt;
5573      struct table_elt *classp = sets[0].src_elt;
5574      rtx dest = SET_DEST (sets[0].rtl);
5575      machine_mode eqvmode = GET_MODE (dest);
5576
5577      if (GET_CODE (dest) == STRICT_LOW_PART)
5578	{
5579	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5580	  classp = 0;
5581	}
5582      if (insert_regs (src_eqv, classp, 0))
5583	{
5584	  rehash_using_reg (src_eqv);
5585	  src_eqv_hash = HASH (src_eqv, eqvmode);
5586	}
5587      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5588      elt->in_memory = src_eqv_in_memory;
5589      src_eqv_elt = elt;
5590
5591      /* Check to see if src_eqv_elt is the same as a set source which
5592	 does not yet have an elt, and if so set the elt of the set source
5593	 to src_eqv_elt.  */
5594      for (i = 0; i < n_sets; i++)
5595	if (sets[i].rtl && sets[i].src_elt == 0
5596	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5597	  sets[i].src_elt = src_eqv_elt;
5598    }
5599
5600  for (i = 0; i < n_sets; i++)
5601    if (sets[i].rtl && ! sets[i].src_volatile
5602	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5603      {
5604	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5605	  {
5606	    /* REG_EQUAL in setting a STRICT_LOW_PART
5607	       gives an equivalent for the entire destination register,
5608	       not just for the subreg being stored in now.
5609	       This is a more interesting equivalence, so we arrange later
5610	       to treat the entire reg as the destination.  */
5611	    sets[i].src_elt = src_eqv_elt;
5612	    sets[i].src_hash = src_eqv_hash;
5613	  }
5614	else
5615	  {
5616	    /* Insert source and constant equivalent into hash table, if not
5617	       already present.  */
5618	    struct table_elt *classp = src_eqv_elt;
5619	    rtx src = sets[i].src;
5620	    rtx dest = SET_DEST (sets[i].rtl);
5621	    machine_mode mode
5622	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5623
5624	    /* It's possible that we have a source value known to be
5625	       constant but don't have a REG_EQUAL note on the insn.
5626	       Lack of a note will mean src_eqv_elt will be NULL.  This
5627	       can happen where we've generated a SUBREG to access a
5628	       CONST_INT that is already in a register in a wider mode.
5629	       Ensure that the source expression is put in the proper
5630	       constant class.  */
5631	    if (!classp)
5632	      classp = sets[i].src_const_elt;
5633
5634	    if (sets[i].src_elt == 0)
5635	      {
5636		struct table_elt *elt;
5637
5638		/* Note that these insert_regs calls cannot remove
5639		   any of the src_elt's, because they would have failed to
5640		   match if not still valid.  */
5641		if (insert_regs (src, classp, 0))
5642		  {
5643		    rehash_using_reg (src);
5644		    sets[i].src_hash = HASH (src, mode);
5645		  }
5646		elt = insert (src, classp, sets[i].src_hash, mode);
5647		elt->in_memory = sets[i].src_in_memory;
5648		/* If inline asm has any clobbers, ensure we only reuse
5649		   existing inline asms and never try to put the ASM_OPERANDS
5650		   into an insn that isn't inline asm.  */
5651		if (GET_CODE (src) == ASM_OPERANDS
5652		    && GET_CODE (x) == PARALLEL)
5653		  elt->cost = MAX_COST;
5654		sets[i].src_elt = classp = elt;
5655	      }
5656	    if (sets[i].src_const && sets[i].src_const_elt == 0
5657		&& src != sets[i].src_const
5658		&& ! rtx_equal_p (sets[i].src_const, src))
5659	      sets[i].src_elt = insert (sets[i].src_const, classp,
5660					sets[i].src_const_hash, mode);
5661	  }
5662      }
5663    else if (sets[i].src_elt == 0)
5664      /* If we did not insert the source into the hash table (e.g., it was
5665	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5666	 so that the destination goes into that class.  */
5667      sets[i].src_elt = src_eqv_elt;
5668
5669  /* Record destination addresses in the hash table.  This allows us to
5670     check if they are invalidated by other sets.  */
5671  for (i = 0; i < n_sets; i++)
5672    {
5673      if (sets[i].rtl)
5674	{
5675	  rtx x = sets[i].inner_dest;
5676	  struct table_elt *elt;
5677	  machine_mode mode;
5678	  unsigned hash;
5679
5680	  if (MEM_P (x))
5681	    {
5682	      x = XEXP (x, 0);
5683	      mode = GET_MODE (x);
5684	      hash = HASH (x, mode);
5685	      elt = lookup (x, hash, mode);
5686	      if (!elt)
5687		{
5688		  if (insert_regs (x, NULL, 0))
5689		    {
5690		      rtx dest = SET_DEST (sets[i].rtl);
5691
5692		      rehash_using_reg (x);
5693		      hash = HASH (x, mode);
5694		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5695		    }
5696		  elt = insert (x, NULL, hash, mode);
5697		}
5698
5699	      sets[i].dest_addr_elt = elt;
5700	    }
5701	  else
5702	    sets[i].dest_addr_elt = NULL;
5703	}
5704    }
5705
5706  invalidate_from_clobbers (insn);
5707
5708  /* Some registers are invalidated by subroutine calls.  Memory is
5709     invalidated by non-constant calls.  */
5710
5711  if (CALL_P (insn))
5712    {
5713      if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5714	invalidate_memory ();
5715      invalidate_for_call ();
5716    }
5717
5718  /* Now invalidate everything set by this instruction.
5719     If a SUBREG or other funny destination is being set,
5720     sets[i].rtl is still nonzero, so here we invalidate the reg
5721     a part of which is being set.  */
5722
5723  for (i = 0; i < n_sets; i++)
5724    if (sets[i].rtl)
5725      {
5726	/* We can't use the inner dest, because the mode associated with
5727	   a ZERO_EXTRACT is significant.  */
5728	rtx dest = SET_DEST (sets[i].rtl);
5729
5730	/* Needed for registers to remove the register from its
5731	   previous quantity's chain.
5732	   Needed for memory if this is a nonvarying address, unless
5733	   we have just done an invalidate_memory that covers even those.  */
5734	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5735	  invalidate (dest, VOIDmode);
5736	else if (MEM_P (dest))
5737	  invalidate (dest, VOIDmode);
5738	else if (GET_CODE (dest) == STRICT_LOW_PART
5739		 || GET_CODE (dest) == ZERO_EXTRACT)
5740	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5741      }
5742
5743  /* Don't cse over a call to setjmp; on some machines (eg VAX)
5744     the regs restored by the longjmp come from a later time
5745     than the setjmp.  */
5746  if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5747    {
5748      flush_hash_table ();
5749      goto done;
5750    }
5751
5752  /* Make sure registers mentioned in destinations
5753     are safe for use in an expression to be inserted.
5754     This removes from the hash table
5755     any invalid entry that refers to one of these registers.
5756
5757     We don't care about the return value from mention_regs because
5758     we are going to hash the SET_DEST values unconditionally.  */
5759
5760  for (i = 0; i < n_sets; i++)
5761    {
5762      if (sets[i].rtl)
5763	{
5764	  rtx x = SET_DEST (sets[i].rtl);
5765
5766	  if (!REG_P (x))
5767	    mention_regs (x);
5768	  else
5769	    {
5770	      /* We used to rely on all references to a register becoming
5771		 inaccessible when a register changes to a new quantity,
5772		 since that changes the hash code.  However, that is not
5773		 safe, since after HASH_SIZE new quantities we get a
5774		 hash 'collision' of a register with its own invalid
5775		 entries.  And since SUBREGs have been changed not to
5776		 change their hash code with the hash code of the register,
5777		 it wouldn't work any longer at all.  So we have to check
5778		 for any invalid references lying around now.
5779		 This code is similar to the REG case in mention_regs,
5780		 but it knows that reg_tick has been incremented, and
5781		 it leaves reg_in_table as -1 .  */
5782	      unsigned int regno = REGNO (x);
5783	      unsigned int endregno = END_REGNO (x);
5784	      unsigned int i;
5785
5786	      for (i = regno; i < endregno; i++)
5787		{
5788		  if (REG_IN_TABLE (i) >= 0)
5789		    {
5790		      remove_invalid_refs (i);
5791		      REG_IN_TABLE (i) = -1;
5792		    }
5793		}
5794	    }
5795	}
5796    }
5797
5798  /* We may have just removed some of the src_elt's from the hash table.
5799     So replace each one with the current head of the same class.
5800     Also check if destination addresses have been removed.  */
5801
5802  for (i = 0; i < n_sets; i++)
5803    if (sets[i].rtl)
5804      {
5805	if (sets[i].dest_addr_elt
5806	    && sets[i].dest_addr_elt->first_same_value == 0)
5807	  {
5808	    /* The elt was removed, which means this destination is not
5809	       valid after this instruction.  */
5810	    sets[i].rtl = NULL_RTX;
5811	  }
5812	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5813	  /* If elt was removed, find current head of same class,
5814	     or 0 if nothing remains of that class.  */
5815	  {
5816	    struct table_elt *elt = sets[i].src_elt;
5817
5818	    while (elt && elt->prev_same_value)
5819	      elt = elt->prev_same_value;
5820
5821	    while (elt && elt->first_same_value == 0)
5822	      elt = elt->next_same_value;
5823	    sets[i].src_elt = elt ? elt->first_same_value : 0;
5824	  }
5825      }
5826
5827  /* Now insert the destinations into their equivalence classes.  */
5828
5829  for (i = 0; i < n_sets; i++)
5830    if (sets[i].rtl)
5831      {
5832	rtx dest = SET_DEST (sets[i].rtl);
5833	struct table_elt *elt;
5834
5835	/* Don't record value if we are not supposed to risk allocating
5836	   floating-point values in registers that might be wider than
5837	   memory.  */
5838	if ((flag_float_store
5839	     && MEM_P (dest)
5840	     && FLOAT_MODE_P (GET_MODE (dest)))
5841	    /* Don't record BLKmode values, because we don't know the
5842	       size of it, and can't be sure that other BLKmode values
5843	       have the same or smaller size.  */
5844	    || GET_MODE (dest) == BLKmode
5845	    /* If we didn't put a REG_EQUAL value or a source into the hash
5846	       table, there is no point is recording DEST.  */
5847	    || sets[i].src_elt == 0
5848	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5849	       or SIGN_EXTEND, don't record DEST since it can cause
5850	       some tracking to be wrong.
5851
5852	       ??? Think about this more later.  */
5853	    || (paradoxical_subreg_p (dest)
5854		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
5855		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5856	  continue;
5857
5858	/* STRICT_LOW_PART isn't part of the value BEING set,
5859	   and neither is the SUBREG inside it.
5860	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
5861	if (GET_CODE (dest) == STRICT_LOW_PART)
5862	  dest = SUBREG_REG (XEXP (dest, 0));
5863
5864	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5865	  /* Registers must also be inserted into chains for quantities.  */
5866	  if (insert_regs (dest, sets[i].src_elt, 1))
5867	    {
5868	      /* If `insert_regs' changes something, the hash code must be
5869		 recalculated.  */
5870	      rehash_using_reg (dest);
5871	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5872	    }
5873
5874	elt = insert (dest, sets[i].src_elt,
5875		      sets[i].dest_hash, GET_MODE (dest));
5876
5877	/* If this is a constant, insert the constant anchors with the
5878	   equivalent register-offset expressions using register DEST.  */
5879	if (targetm.const_anchor
5880	    && REG_P (dest)
5881	    && SCALAR_INT_MODE_P (GET_MODE (dest))
5882	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5883	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5884
5885	elt->in_memory = (MEM_P (sets[i].inner_dest)
5886			  && !MEM_READONLY_P (sets[i].inner_dest));
5887
5888	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5889	   narrower than M2, and both M1 and M2 are the same number of words,
5890	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5891	   make that equivalence as well.
5892
5893	   However, BAR may have equivalences for which gen_lowpart
5894	   will produce a simpler value than gen_lowpart applied to
5895	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5896	   BAR's equivalences.  If we don't get a simplified form, make
5897	   the SUBREG.  It will not be used in an equivalence, but will
5898	   cause two similar assignments to be detected.
5899
5900	   Note the loop below will find SUBREG_REG (DEST) since we have
5901	   already entered SRC and DEST of the SET in the table.  */
5902
5903	if (GET_CODE (dest) == SUBREG
5904	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5905		 / UNITS_PER_WORD)
5906		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5907	    && (GET_MODE_SIZE (GET_MODE (dest))
5908		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5909	    && sets[i].src_elt != 0)
5910	  {
5911	    machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5912	    struct table_elt *elt, *classp = 0;
5913
5914	    for (elt = sets[i].src_elt->first_same_value; elt;
5915		 elt = elt->next_same_value)
5916	      {
5917		rtx new_src = 0;
5918		unsigned src_hash;
5919		struct table_elt *src_elt;
5920		int byte = 0;
5921
5922		/* Ignore invalid entries.  */
5923		if (!REG_P (elt->exp)
5924		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5925		  continue;
5926
5927		/* We may have already been playing subreg games.  If the
5928		   mode is already correct for the destination, use it.  */
5929		if (GET_MODE (elt->exp) == new_mode)
5930		  new_src = elt->exp;
5931		else
5932		  {
5933		    /* Calculate big endian correction for the SUBREG_BYTE.
5934		       We have already checked that M1 (GET_MODE (dest))
5935		       is not narrower than M2 (new_mode).  */
5936		    if (BYTES_BIG_ENDIAN)
5937		      byte = (GET_MODE_SIZE (GET_MODE (dest))
5938			      - GET_MODE_SIZE (new_mode));
5939
5940		    new_src = simplify_gen_subreg (new_mode, elt->exp,
5941					           GET_MODE (dest), byte);
5942		  }
5943
5944		/* The call to simplify_gen_subreg fails if the value
5945		   is VOIDmode, yet we can't do any simplification, e.g.
5946		   for EXPR_LISTs denoting function call results.
5947		   It is invalid to construct a SUBREG with a VOIDmode
5948		   SUBREG_REG, hence a zero new_src means we can't do
5949		   this substitution.  */
5950		if (! new_src)
5951		  continue;
5952
5953		src_hash = HASH (new_src, new_mode);
5954		src_elt = lookup (new_src, src_hash, new_mode);
5955
5956		/* Put the new source in the hash table is if isn't
5957		   already.  */
5958		if (src_elt == 0)
5959		  {
5960		    if (insert_regs (new_src, classp, 0))
5961		      {
5962			rehash_using_reg (new_src);
5963			src_hash = HASH (new_src, new_mode);
5964		      }
5965		    src_elt = insert (new_src, classp, src_hash, new_mode);
5966		    src_elt->in_memory = elt->in_memory;
5967		    if (GET_CODE (new_src) == ASM_OPERANDS
5968			&& elt->cost == MAX_COST)
5969		      src_elt->cost = MAX_COST;
5970		  }
5971		else if (classp && classp != src_elt->first_same_value)
5972		  /* Show that two things that we've seen before are
5973		     actually the same.  */
5974		  merge_equiv_classes (src_elt, classp);
5975
5976		classp = src_elt->first_same_value;
5977		/* Ignore invalid entries.  */
5978		while (classp
5979		       && !REG_P (classp->exp)
5980		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5981		  classp = classp->next_same_value;
5982	      }
5983	  }
5984      }
5985
5986  /* Special handling for (set REG0 REG1) where REG0 is the
5987     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
5988     be used in the sequel, so (if easily done) change this insn to
5989     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5990     that computed their value.  Then REG1 will become a dead store
5991     and won't cloud the situation for later optimizations.
5992
5993     Do not make this change if REG1 is a hard register, because it will
5994     then be used in the sequel and we may be changing a two-operand insn
5995     into a three-operand insn.
5996
5997     Also do not do this if we are operating on a copy of INSN.  */
5998
5999  if (n_sets == 1 && sets[0].rtl)
6000    try_back_substitute_reg (sets[0].rtl, insn);
6001
6002done:;
6003}
6004
6005/* Remove from the hash table all expressions that reference memory.  */
6006
6007static void
6008invalidate_memory (void)
6009{
6010  int i;
6011  struct table_elt *p, *next;
6012
6013  for (i = 0; i < HASH_SIZE; i++)
6014    for (p = table[i]; p; p = next)
6015      {
6016	next = p->next_same_hash;
6017	if (p->in_memory)
6018	  remove_from_table (p, i);
6019      }
6020}
6021
6022/* Perform invalidation on the basis of everything about INSN,
6023   except for invalidating the actual places that are SET in it.
6024   This includes the places CLOBBERed, and anything that might
6025   alias with something that is SET or CLOBBERed.  */
6026
6027static void
6028invalidate_from_clobbers (rtx_insn *insn)
6029{
6030  rtx x = PATTERN (insn);
6031
6032  if (GET_CODE (x) == CLOBBER)
6033    {
6034      rtx ref = XEXP (x, 0);
6035      if (ref)
6036	{
6037	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
6038	      || MEM_P (ref))
6039	    invalidate (ref, VOIDmode);
6040	  else if (GET_CODE (ref) == STRICT_LOW_PART
6041		   || GET_CODE (ref) == ZERO_EXTRACT)
6042	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6043	}
6044    }
6045  else if (GET_CODE (x) == PARALLEL)
6046    {
6047      int i;
6048      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6049	{
6050	  rtx y = XVECEXP (x, 0, i);
6051	  if (GET_CODE (y) == CLOBBER)
6052	    {
6053	      rtx ref = XEXP (y, 0);
6054	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
6055		  || MEM_P (ref))
6056		invalidate (ref, VOIDmode);
6057	      else if (GET_CODE (ref) == STRICT_LOW_PART
6058		       || GET_CODE (ref) == ZERO_EXTRACT)
6059		invalidate (XEXP (ref, 0), GET_MODE (ref));
6060	    }
6061	}
6062    }
6063}
6064
6065/* Perform invalidation on the basis of everything about INSN.
6066   This includes the places CLOBBERed, and anything that might
6067   alias with something that is SET or CLOBBERed.  */
6068
6069static void
6070invalidate_from_sets_and_clobbers (rtx_insn *insn)
6071{
6072  rtx tem;
6073  rtx x = PATTERN (insn);
6074
6075  if (CALL_P (insn))
6076    {
6077      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6078	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6079	  invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6080    }
6081
6082  /* Ensure we invalidate the destination register of a CALL insn.
6083     This is necessary for machines where this register is a fixed_reg,
6084     because no other code would invalidate it.  */
6085  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
6086    invalidate (SET_DEST (x), VOIDmode);
6087
6088  else if (GET_CODE (x) == PARALLEL)
6089    {
6090      int i;
6091
6092      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6093	{
6094	  rtx y = XVECEXP (x, 0, i);
6095	  if (GET_CODE (y) == CLOBBER)
6096	    {
6097	      rtx clobbered = XEXP (y, 0);
6098
6099	      if (REG_P (clobbered)
6100		  || GET_CODE (clobbered) == SUBREG)
6101		invalidate (clobbered, VOIDmode);
6102	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6103		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6104		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6105	    }
6106	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
6107	    invalidate (SET_DEST (y), VOIDmode);
6108	}
6109    }
6110}
6111
6112/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6113   and replace any registers in them with either an equivalent constant
6114   or the canonical form of the register.  If we are inside an address,
6115   only do this if the address remains valid.
6116
6117   OBJECT is 0 except when within a MEM in which case it is the MEM.
6118
6119   Return the replacement for X.  */
6120
6121static rtx
6122cse_process_notes_1 (rtx x, rtx object, bool *changed)
6123{
6124  enum rtx_code code = GET_CODE (x);
6125  const char *fmt = GET_RTX_FORMAT (code);
6126  int i;
6127
6128  switch (code)
6129    {
6130    case CONST:
6131    case SYMBOL_REF:
6132    case LABEL_REF:
6133    CASE_CONST_ANY:
6134    case PC:
6135    case CC0:
6136    case LO_SUM:
6137      return x;
6138
6139    case MEM:
6140      validate_change (x, &XEXP (x, 0),
6141		       cse_process_notes (XEXP (x, 0), x, changed), 0);
6142      return x;
6143
6144    case EXPR_LIST:
6145      if (REG_NOTE_KIND (x) == REG_EQUAL)
6146	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6147      /* Fall through.  */
6148
6149    case INSN_LIST:
6150    case INT_LIST:
6151      if (XEXP (x, 1))
6152	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6153      return x;
6154
6155    case SIGN_EXTEND:
6156    case ZERO_EXTEND:
6157    case SUBREG:
6158      {
6159	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6160	/* We don't substitute VOIDmode constants into these rtx,
6161	   since they would impede folding.  */
6162	if (GET_MODE (new_rtx) != VOIDmode)
6163	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6164	return x;
6165      }
6166
6167    case UNSIGNED_FLOAT:
6168      {
6169	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6170	/* We don't substitute negative VOIDmode constants into these rtx,
6171	   since they would impede folding.  */
6172	if (GET_MODE (new_rtx) != VOIDmode
6173	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
6174	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
6175	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6176	return x;
6177      }
6178
6179    case REG:
6180      i = REG_QTY (REGNO (x));
6181
6182      /* Return a constant or a constant register.  */
6183      if (REGNO_QTY_VALID_P (REGNO (x)))
6184	{
6185	  struct qty_table_elem *ent = &qty_table[i];
6186
6187	  if (ent->const_rtx != NULL_RTX
6188	      && (CONSTANT_P (ent->const_rtx)
6189		  || REG_P (ent->const_rtx)))
6190	    {
6191	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6192	      if (new_rtx)
6193		return copy_rtx (new_rtx);
6194	    }
6195	}
6196
6197      /* Otherwise, canonicalize this register.  */
6198      return canon_reg (x, NULL);
6199
6200    default:
6201      break;
6202    }
6203
6204  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6205    if (fmt[i] == 'e')
6206      validate_change (object, &XEXP (x, i),
6207		       cse_process_notes (XEXP (x, i), object, changed), 0);
6208
6209  return x;
6210}
6211
6212static rtx
6213cse_process_notes (rtx x, rtx object, bool *changed)
6214{
6215  rtx new_rtx = cse_process_notes_1 (x, object, changed);
6216  if (new_rtx != x)
6217    *changed = true;
6218  return new_rtx;
6219}
6220
6221
6222/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6223
6224   DATA is a pointer to a struct cse_basic_block_data, that is used to
6225   describe the path.
6226   It is filled with a queue of basic blocks, starting with FIRST_BB
6227   and following a trace through the CFG.
6228
6229   If all paths starting at FIRST_BB have been followed, or no new path
6230   starting at FIRST_BB can be constructed, this function returns FALSE.
6231   Otherwise, DATA->path is filled and the function returns TRUE indicating
6232   that a path to follow was found.
6233
6234   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6235   block in the path will be FIRST_BB.  */
6236
6237static bool
6238cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6239	       int follow_jumps)
6240{
6241  basic_block bb;
6242  edge e;
6243  int path_size;
6244
6245  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
6246
6247  /* See if there is a previous path.  */
6248  path_size = data->path_size;
6249
6250  /* There is a previous path.  Make sure it started with FIRST_BB.  */
6251  if (path_size)
6252    gcc_assert (data->path[0].bb == first_bb);
6253
6254  /* There was only one basic block in the last path.  Clear the path and
6255     return, so that paths starting at another basic block can be tried.  */
6256  if (path_size == 1)
6257    {
6258      path_size = 0;
6259      goto done;
6260    }
6261
6262  /* If the path was empty from the beginning, construct a new path.  */
6263  if (path_size == 0)
6264    data->path[path_size++].bb = first_bb;
6265  else
6266    {
6267      /* Otherwise, path_size must be equal to or greater than 2, because
6268	 a previous path exists that is at least two basic blocks long.
6269
6270	 Update the previous branch path, if any.  If the last branch was
6271	 previously along the branch edge, take the fallthrough edge now.  */
6272      while (path_size >= 2)
6273	{
6274	  basic_block last_bb_in_path, previous_bb_in_path;
6275	  edge e;
6276
6277	  --path_size;
6278	  last_bb_in_path = data->path[path_size].bb;
6279	  previous_bb_in_path = data->path[path_size - 1].bb;
6280
6281	  /* If we previously followed a path along the branch edge, try
6282	     the fallthru edge now.  */
6283	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6284	      && any_condjump_p (BB_END (previous_bb_in_path))
6285	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6286	      && e == BRANCH_EDGE (previous_bb_in_path))
6287	    {
6288	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6289	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
6290		  && single_pred_p (bb)
6291		  /* We used to assert here that we would only see blocks
6292		     that we have not visited yet.  But we may end up
6293		     visiting basic blocks twice if the CFG has changed
6294		     in this run of cse_main, because when the CFG changes
6295		     the topological sort of the CFG also changes.  A basic
6296		     blocks that previously had more than two predecessors
6297		     may now have a single predecessor, and become part of
6298		     a path that starts at another basic block.
6299
6300		     We still want to visit each basic block only once, so
6301		     halt the path here if we have already visited BB.  */
6302		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
6303		{
6304		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
6305		  data->path[path_size++].bb = bb;
6306		  break;
6307		}
6308	    }
6309
6310	  data->path[path_size].bb = NULL;
6311	}
6312
6313      /* If only one block remains in the path, bail.  */
6314      if (path_size == 1)
6315	{
6316	  path_size = 0;
6317	  goto done;
6318	}
6319    }
6320
6321  /* Extend the path if possible.  */
6322  if (follow_jumps)
6323    {
6324      bb = data->path[path_size - 1].bb;
6325      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6326	{
6327	  if (single_succ_p (bb))
6328	    e = single_succ_edge (bb);
6329	  else if (EDGE_COUNT (bb->succs) == 2
6330		   && any_condjump_p (BB_END (bb)))
6331	    {
6332	      /* First try to follow the branch.  If that doesn't lead
6333		 to a useful path, follow the fallthru edge.  */
6334	      e = BRANCH_EDGE (bb);
6335	      if (!single_pred_p (e->dest))
6336		e = FALLTHRU_EDGE (bb);
6337	    }
6338	  else
6339	    e = NULL;
6340
6341	  if (e
6342	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6343	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
6344	      && single_pred_p (e->dest)
6345	      /* Avoid visiting basic blocks twice.  The large comment
6346		 above explains why this can happen.  */
6347	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
6348	    {
6349	      basic_block bb2 = e->dest;
6350	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
6351	      data->path[path_size++].bb = bb2;
6352	      bb = bb2;
6353	    }
6354	  else
6355	    bb = NULL;
6356	}
6357    }
6358
6359done:
6360  data->path_size = path_size;
6361  return path_size != 0;
6362}
6363
6364/* Dump the path in DATA to file F.  NSETS is the number of sets
6365   in the path.  */
6366
6367static void
6368cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6369{
6370  int path_entry;
6371
6372  fprintf (f, ";; Following path with %d sets: ", nsets);
6373  for (path_entry = 0; path_entry < data->path_size; path_entry++)
6374    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6375  fputc ('\n', dump_file);
6376  fflush (f);
6377}
6378
6379
6380/* Return true if BB has exception handling successor edges.  */
6381
6382static bool
6383have_eh_succ_edges (basic_block bb)
6384{
6385  edge e;
6386  edge_iterator ei;
6387
6388  FOR_EACH_EDGE (e, ei, bb->succs)
6389    if (e->flags & EDGE_EH)
6390      return true;
6391
6392  return false;
6393}
6394
6395
6396/* Scan to the end of the path described by DATA.  Return an estimate of
6397   the total number of SETs of all insns in the path.  */
6398
6399static void
6400cse_prescan_path (struct cse_basic_block_data *data)
6401{
6402  int nsets = 0;
6403  int path_size = data->path_size;
6404  int path_entry;
6405
6406  /* Scan to end of each basic block in the path.  */
6407  for (path_entry = 0; path_entry < path_size; path_entry++)
6408    {
6409      basic_block bb;
6410      rtx_insn *insn;
6411
6412      bb = data->path[path_entry].bb;
6413
6414      FOR_BB_INSNS (bb, insn)
6415	{
6416	  if (!INSN_P (insn))
6417	    continue;
6418
6419	  /* A PARALLEL can have lots of SETs in it,
6420	     especially if it is really an ASM_OPERANDS.  */
6421	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
6422	    nsets += XVECLEN (PATTERN (insn), 0);
6423	  else
6424	    nsets += 1;
6425	}
6426    }
6427
6428  data->nsets = nsets;
6429}
6430
6431/* Return true if the pattern of INSN uses a LABEL_REF for which
6432   there isn't a REG_LABEL_OPERAND note.  */
6433
6434static bool
6435check_for_label_ref (rtx_insn *insn)
6436{
6437  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6438     note for it, we must rerun jump since it needs to place the note.  If
6439     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6440     don't do this since no REG_LABEL_OPERAND will be added.  */
6441  subrtx_iterator::array_type array;
6442  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
6443    {
6444      const_rtx x = *iter;
6445      if (GET_CODE (x) == LABEL_REF
6446	  && !LABEL_REF_NONLOCAL_P (x)
6447	  && (!JUMP_P (insn)
6448	      || !label_is_jump_target_p (LABEL_REF_LABEL (x), insn))
6449	  && LABEL_P (LABEL_REF_LABEL (x))
6450	  && INSN_UID (LABEL_REF_LABEL (x)) != 0
6451	  && !find_reg_note (insn, REG_LABEL_OPERAND, LABEL_REF_LABEL (x)))
6452	return true;
6453    }
6454  return false;
6455}
6456
6457/* Process a single extended basic block described by EBB_DATA.  */
6458
6459static void
6460cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6461{
6462  int path_size = ebb_data->path_size;
6463  int path_entry;
6464  int num_insns = 0;
6465
6466  /* Allocate the space needed by qty_table.  */
6467  qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6468
6469  new_basic_block ();
6470  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6471  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6472  for (path_entry = 0; path_entry < path_size; path_entry++)
6473    {
6474      basic_block bb;
6475      rtx_insn *insn;
6476
6477      bb = ebb_data->path[path_entry].bb;
6478
6479      /* Invalidate recorded information for eh regs if there is an EH
6480	 edge pointing to that bb.  */
6481      if (bb_has_eh_pred (bb))
6482	{
6483	  df_ref def;
6484
6485	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
6486	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6487	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6488	}
6489
6490      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6491      FOR_BB_INSNS (bb, insn)
6492	{
6493	  /* If we have processed 1,000 insns, flush the hash table to
6494	     avoid extreme quadratic behavior.  We must not include NOTEs
6495	     in the count since there may be more of them when generating
6496	     debugging information.  If we clear the table at different
6497	     times, code generated with -g -O might be different than code
6498	     generated with -O but not -g.
6499
6500	     FIXME: This is a real kludge and needs to be done some other
6501		    way.  */
6502	  if (NONDEBUG_INSN_P (insn)
6503	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6504	    {
6505	      flush_hash_table ();
6506	      num_insns = 0;
6507	    }
6508
6509	  if (INSN_P (insn))
6510	    {
6511	      /* Process notes first so we have all notes in canonical forms
6512		 when looking for duplicate operations.  */
6513	      if (REG_NOTES (insn))
6514		{
6515		  bool changed = false;
6516		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6517						        NULL_RTX, &changed);
6518		  if (changed)
6519		    df_notes_rescan (insn);
6520		}
6521
6522	      cse_insn (insn);
6523
6524	      /* If we haven't already found an insn where we added a LABEL_REF,
6525		 check this one.  */
6526	      if (INSN_P (insn) && !recorded_label_ref
6527		  && check_for_label_ref (insn))
6528		recorded_label_ref = true;
6529
6530#ifdef HAVE_cc0
6531	      if (NONDEBUG_INSN_P (insn))
6532		{
6533		  /* If the previous insn sets CC0 and this insn no
6534		     longer references CC0, delete the previous insn.
6535		     Here we use fact that nothing expects CC0 to be
6536		     valid over an insn, which is true until the final
6537		     pass.  */
6538		  rtx_insn *prev_insn;
6539		  rtx tem;
6540
6541		  prev_insn = prev_nonnote_nondebug_insn (insn);
6542		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6543		      && (tem = single_set (prev_insn)) != NULL_RTX
6544		      && SET_DEST (tem) == cc0_rtx
6545		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6546		    delete_insn (prev_insn);
6547
6548		  /* If this insn is not the last insn in the basic
6549		     block, it will be PREV_INSN(insn) in the next
6550		     iteration.  If we recorded any CC0-related
6551		     information for this insn, remember it.  */
6552		  if (insn != BB_END (bb))
6553		    {
6554		      prev_insn_cc0 = this_insn_cc0;
6555		      prev_insn_cc0_mode = this_insn_cc0_mode;
6556		    }
6557		}
6558#endif
6559	    }
6560	}
6561
6562      /* With non-call exceptions, we are not always able to update
6563	 the CFG properly inside cse_insn.  So clean up possibly
6564	 redundant EH edges here.  */
6565      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6566	cse_cfg_altered |= purge_dead_edges (bb);
6567
6568      /* If we changed a conditional jump, we may have terminated
6569	 the path we are following.  Check that by verifying that
6570	 the edge we would take still exists.  If the edge does
6571	 not exist anymore, purge the remainder of the path.
6572	 Note that this will cause us to return to the caller.  */
6573      if (path_entry < path_size - 1)
6574	{
6575	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6576	  if (!find_edge (bb, next_bb))
6577	    {
6578	      do
6579		{
6580		  path_size--;
6581
6582		  /* If we truncate the path, we must also reset the
6583		     visited bit on the remaining blocks in the path,
6584		     or we will never visit them at all.  */
6585		  bitmap_clear_bit (cse_visited_basic_blocks,
6586			     ebb_data->path[path_size].bb->index);
6587		  ebb_data->path[path_size].bb = NULL;
6588		}
6589	      while (path_size - 1 != path_entry);
6590	      ebb_data->path_size = path_size;
6591	    }
6592	}
6593
6594      /* If this is a conditional jump insn, record any known
6595	 equivalences due to the condition being tested.  */
6596      insn = BB_END (bb);
6597      if (path_entry < path_size - 1
6598	  && JUMP_P (insn)
6599	  && single_set (insn)
6600	  && any_condjump_p (insn))
6601	{
6602	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6603	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6604	  record_jump_equiv (insn, taken);
6605	}
6606
6607#ifdef HAVE_cc0
6608      /* Clear the CC0-tracking related insns, they can't provide
6609	 useful information across basic block boundaries.  */
6610      prev_insn_cc0 = 0;
6611#endif
6612    }
6613
6614  gcc_assert (next_qty <= max_qty);
6615
6616  free (qty_table);
6617}
6618
6619
6620/* Perform cse on the instructions of a function.
6621   F is the first instruction.
6622   NREGS is one plus the highest pseudo-reg number used in the instruction.
6623
6624   Return 2 if jump optimizations should be redone due to simplifications
6625   in conditional jump instructions.
6626   Return 1 if the CFG should be cleaned up because it has been modified.
6627   Return 0 otherwise.  */
6628
6629static int
6630cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
6631{
6632  struct cse_basic_block_data ebb_data;
6633  basic_block bb;
6634  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6635  int i, n_blocks;
6636
6637  df_set_flags (DF_LR_RUN_DCE);
6638  df_note_add_problem ();
6639  df_analyze ();
6640  df_set_flags (DF_DEFER_INSN_RESCAN);
6641
6642  reg_scan (get_insns (), max_reg_num ());
6643  init_cse_reg_info (nregs);
6644
6645  ebb_data.path = XNEWVEC (struct branch_path,
6646			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6647
6648  cse_cfg_altered = false;
6649  cse_jumps_altered = false;
6650  recorded_label_ref = false;
6651  constant_pool_entries_cost = 0;
6652  constant_pool_entries_regcost = 0;
6653  ebb_data.path_size = 0;
6654  ebb_data.nsets = 0;
6655  rtl_hooks = cse_rtl_hooks;
6656
6657  init_recog ();
6658  init_alias_analysis ();
6659
6660  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6661
6662  /* Set up the table of already visited basic blocks.  */
6663  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6664  bitmap_clear (cse_visited_basic_blocks);
6665
6666  /* Loop over basic blocks in reverse completion order (RPO),
6667     excluding the ENTRY and EXIT blocks.  */
6668  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6669  i = 0;
6670  while (i < n_blocks)
6671    {
6672      /* Find the first block in the RPO queue that we have not yet
6673	 processed before.  */
6674      do
6675	{
6676	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
6677	}
6678      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
6679	     && i < n_blocks);
6680
6681      /* Find all paths starting with BB, and process them.  */
6682      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6683	{
6684	  /* Pre-scan the path.  */
6685	  cse_prescan_path (&ebb_data);
6686
6687	  /* If this basic block has no sets, skip it.  */
6688	  if (ebb_data.nsets == 0)
6689	    continue;
6690
6691	  /* Get a reasonable estimate for the maximum number of qty's
6692	     needed for this path.  For this, we take the number of sets
6693	     and multiply that by MAX_RECOG_OPERANDS.  */
6694	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6695
6696	  /* Dump the path we're about to process.  */
6697	  if (dump_file)
6698	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6699
6700	  cse_extended_basic_block (&ebb_data);
6701	}
6702    }
6703
6704  /* Clean up.  */
6705  end_alias_analysis ();
6706  free (reg_eqv_table);
6707  free (ebb_data.path);
6708  sbitmap_free (cse_visited_basic_blocks);
6709  free (rc_order);
6710  rtl_hooks = general_rtl_hooks;
6711
6712  if (cse_jumps_altered || recorded_label_ref)
6713    return 2;
6714  else if (cse_cfg_altered)
6715    return 1;
6716  else
6717    return 0;
6718}
6719
6720/* Count the number of times registers are used (not set) in X.
6721   COUNTS is an array in which we accumulate the count, INCR is how much
6722   we count each register usage.
6723
6724   Don't count a usage of DEST, which is the SET_DEST of a SET which
6725   contains X in its SET_SRC.  This is because such a SET does not
6726   modify the liveness of DEST.
6727   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6728   We must then count uses of a SET_DEST regardless, because the insn can't be
6729   deleted here.  */
6730
6731static void
6732count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6733{
6734  enum rtx_code code;
6735  rtx note;
6736  const char *fmt;
6737  int i, j;
6738
6739  if (x == 0)
6740    return;
6741
6742  switch (code = GET_CODE (x))
6743    {
6744    case REG:
6745      if (x != dest)
6746	counts[REGNO (x)] += incr;
6747      return;
6748
6749    case PC:
6750    case CC0:
6751    case CONST:
6752    CASE_CONST_ANY:
6753    case SYMBOL_REF:
6754    case LABEL_REF:
6755      return;
6756
6757    case CLOBBER:
6758      /* If we are clobbering a MEM, mark any registers inside the address
6759         as being used.  */
6760      if (MEM_P (XEXP (x, 0)))
6761	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6762      return;
6763
6764    case SET:
6765      /* Unless we are setting a REG, count everything in SET_DEST.  */
6766      if (!REG_P (SET_DEST (x)))
6767	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6768      count_reg_usage (SET_SRC (x), counts,
6769		       dest ? dest : SET_DEST (x),
6770		       incr);
6771      return;
6772
6773    case DEBUG_INSN:
6774      return;
6775
6776    case CALL_INSN:
6777    case INSN:
6778    case JUMP_INSN:
6779      /* We expect dest to be NULL_RTX here.  If the insn may throw,
6780	 or if it cannot be deleted due to side-effects, mark this fact
6781	 by setting DEST to pc_rtx.  */
6782      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
6783	  || side_effects_p (PATTERN (x)))
6784	dest = pc_rtx;
6785      if (code == CALL_INSN)
6786	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6787      count_reg_usage (PATTERN (x), counts, dest, incr);
6788
6789      /* Things used in a REG_EQUAL note aren't dead since loop may try to
6790	 use them.  */
6791
6792      note = find_reg_equal_equiv_note (x);
6793      if (note)
6794	{
6795	  rtx eqv = XEXP (note, 0);
6796
6797	  if (GET_CODE (eqv) == EXPR_LIST)
6798	  /* This REG_EQUAL note describes the result of a function call.
6799	     Process all the arguments.  */
6800	    do
6801	      {
6802		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6803		eqv = XEXP (eqv, 1);
6804	      }
6805	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
6806	  else
6807	    count_reg_usage (eqv, counts, dest, incr);
6808	}
6809      return;
6810
6811    case EXPR_LIST:
6812      if (REG_NOTE_KIND (x) == REG_EQUAL
6813	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6814	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6815	     involving registers in the address.  */
6816	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
6817	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6818
6819      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6820      return;
6821
6822    case ASM_OPERANDS:
6823      /* Iterate over just the inputs, not the constraints as well.  */
6824      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6825	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6826      return;
6827
6828    case INSN_LIST:
6829    case INT_LIST:
6830      gcc_unreachable ();
6831
6832    default:
6833      break;
6834    }
6835
6836  fmt = GET_RTX_FORMAT (code);
6837  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6838    {
6839      if (fmt[i] == 'e')
6840	count_reg_usage (XEXP (x, i), counts, dest, incr);
6841      else if (fmt[i] == 'E')
6842	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6843	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6844    }
6845}
6846
6847/* Return true if X is a dead register.  */
6848
6849static inline int
6850is_dead_reg (const_rtx x, int *counts)
6851{
6852  return (REG_P (x)
6853	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
6854	  && counts[REGNO (x)] == 0);
6855}
6856
6857/* Return true if set is live.  */
6858static bool
6859set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
6860	    int *counts)
6861{
6862#ifdef HAVE_cc0
6863  rtx tem;
6864#endif
6865
6866  if (set_noop_p (set))
6867    ;
6868
6869#ifdef HAVE_cc0
6870  else if (GET_CODE (SET_DEST (set)) == CC0
6871	   && !side_effects_p (SET_SRC (set))
6872	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
6873	       || !INSN_P (tem)
6874	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6875    return false;
6876#endif
6877  else if (!is_dead_reg (SET_DEST (set), counts)
6878	   || side_effects_p (SET_SRC (set)))
6879    return true;
6880  return false;
6881}
6882
6883/* Return true if insn is live.  */
6884
6885static bool
6886insn_live_p (rtx_insn *insn, int *counts)
6887{
6888  int i;
6889  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
6890    return true;
6891  else if (GET_CODE (PATTERN (insn)) == SET)
6892    return set_live_p (PATTERN (insn), insn, counts);
6893  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6894    {
6895      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6896	{
6897	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
6898
6899	  if (GET_CODE (elt) == SET)
6900	    {
6901	      if (set_live_p (elt, insn, counts))
6902		return true;
6903	    }
6904	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6905	    return true;
6906	}
6907      return false;
6908    }
6909  else if (DEBUG_INSN_P (insn))
6910    {
6911      rtx_insn *next;
6912
6913      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6914	if (NOTE_P (next))
6915	  continue;
6916	else if (!DEBUG_INSN_P (next))
6917	  return true;
6918	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6919	  return false;
6920
6921      return true;
6922    }
6923  else
6924    return true;
6925}
6926
6927/* Count the number of stores into pseudo.  Callback for note_stores.  */
6928
6929static void
6930count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
6931{
6932  int *counts = (int *) data;
6933  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
6934    counts[REGNO (x)]++;
6935}
6936
6937/* Return if DEBUG_INSN pattern PAT needs to be reset because some dead
6938   pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
6939   is dead and REPLACEMENTS[X] is null if it has no replacemenet.
6940   Set *SEEN_REPL to true if we see a dead register that does have
6941   a replacement.  */
6942
6943static bool
6944is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
6945		    bool *seen_repl)
6946{
6947  subrtx_iterator::array_type array;
6948  FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
6949    {
6950      const_rtx x = *iter;
6951      if (is_dead_reg (x, counts))
6952	{
6953	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
6954	    *seen_repl = true;
6955	  else
6956	    return true;
6957	}
6958    }
6959  return false;
6960}
6961
6962/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
6963   Callback for simplify_replace_fn_rtx.  */
6964
6965static rtx
6966replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
6967{
6968  rtx *replacements = (rtx *) data;
6969
6970  if (REG_P (x)
6971      && REGNO (x) >= FIRST_PSEUDO_REGISTER
6972      && replacements[REGNO (x)] != NULL_RTX)
6973    {
6974      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
6975	return replacements[REGNO (x)];
6976      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
6977			     GET_MODE (replacements[REGNO (x)]));
6978    }
6979  return NULL_RTX;
6980}
6981
6982/* Scan all the insns and delete any that are dead; i.e., they store a register
6983   that is never used or they copy a register to itself.
6984
6985   This is used to remove insns made obviously dead by cse, loop or other
6986   optimizations.  It improves the heuristics in loop since it won't try to
6987   move dead invariants out of loops or make givs for dead quantities.  The
6988   remaining passes of the compilation are also sped up.  */
6989
6990int
6991delete_trivially_dead_insns (rtx_insn *insns, int nreg)
6992{
6993  int *counts;
6994  rtx_insn *insn, *prev;
6995  rtx *replacements = NULL;
6996  int ndead = 0;
6997
6998  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
6999  /* First count the number of times each register is used.  */
7000  if (MAY_HAVE_DEBUG_INSNS)
7001    {
7002      counts = XCNEWVEC (int, nreg * 3);
7003      for (insn = insns; insn; insn = NEXT_INSN (insn))
7004	if (DEBUG_INSN_P (insn))
7005	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7006			   NULL_RTX, 1);
7007	else if (INSN_P (insn))
7008	  {
7009	    count_reg_usage (insn, counts, NULL_RTX, 1);
7010	    note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
7011	  }
7012      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
7013	 First one counts how many times each pseudo is used outside
7014	 of debug insns, second counts how many times each pseudo is
7015	 used in debug insns and third counts how many times a pseudo
7016	 is stored.  */
7017    }
7018  else
7019    {
7020      counts = XCNEWVEC (int, nreg);
7021      for (insn = insns; insn; insn = NEXT_INSN (insn))
7022	if (INSN_P (insn))
7023	  count_reg_usage (insn, counts, NULL_RTX, 1);
7024      /* If no debug insns can be present, COUNTS is just an array
7025	 which counts how many times each pseudo is used.  */
7026    }
7027  /* Pseudo PIC register should be considered as used due to possible
7028     new usages generated.  */
7029  if (!reload_completed
7030      && pic_offset_table_rtx
7031      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
7032    counts[REGNO (pic_offset_table_rtx)]++;
7033  /* Go from the last insn to the first and delete insns that only set unused
7034     registers or copy a register to itself.  As we delete an insn, remove
7035     usage counts for registers it uses.
7036
7037     The first jump optimization pass may leave a real insn as the last
7038     insn in the function.   We must not skip that insn or we may end
7039     up deleting code that is not really dead.
7040
7041     If some otherwise unused register is only used in DEBUG_INSNs,
7042     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
7043     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
7044     has been created for the unused register, replace it with
7045     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
7046  for (insn = get_last_insn (); insn; insn = prev)
7047    {
7048      int live_insn = 0;
7049
7050      prev = PREV_INSN (insn);
7051      if (!INSN_P (insn))
7052	continue;
7053
7054      live_insn = insn_live_p (insn, counts);
7055
7056      /* If this is a dead insn, delete it and show registers in it aren't
7057	 being used.  */
7058
7059      if (! live_insn && dbg_cnt (delete_trivial_dead))
7060	{
7061	  if (DEBUG_INSN_P (insn))
7062	    count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7063			     NULL_RTX, -1);
7064	  else
7065	    {
7066	      rtx set;
7067	      if (MAY_HAVE_DEBUG_INSNS
7068		  && (set = single_set (insn)) != NULL_RTX
7069		  && is_dead_reg (SET_DEST (set), counts)
7070		  /* Used at least once in some DEBUG_INSN.  */
7071		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
7072		  /* And set exactly once.  */
7073		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
7074		  && !side_effects_p (SET_SRC (set))
7075		  && asm_noperands (PATTERN (insn)) < 0)
7076		{
7077		  rtx dval, bind_var_loc;
7078		  rtx_insn *bind;
7079
7080		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
7081		  dval = make_debug_expr_from_rtl (SET_DEST (set));
7082
7083		  /* Emit a debug bind insn before the insn in which
7084		     reg dies.  */
7085		  bind_var_loc =
7086		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
7087					  DEBUG_EXPR_TREE_DECL (dval),
7088					  SET_SRC (set),
7089					  VAR_INIT_STATUS_INITIALIZED);
7090		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);
7091
7092		  bind = emit_debug_insn_before (bind_var_loc, insn);
7093		  df_insn_rescan (bind);
7094
7095		  if (replacements == NULL)
7096		    replacements = XCNEWVEC (rtx, nreg);
7097		  replacements[REGNO (SET_DEST (set))] = dval;
7098		}
7099
7100	      count_reg_usage (insn, counts, NULL_RTX, -1);
7101	      ndead++;
7102	    }
7103	  delete_insn_and_edges (insn);
7104	}
7105    }
7106
7107  if (MAY_HAVE_DEBUG_INSNS)
7108    {
7109      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7110	if (DEBUG_INSN_P (insn))
7111	  {
7112	    /* If this debug insn references a dead register that wasn't replaced
7113	       with an DEBUG_EXPR, reset the DEBUG_INSN.  */
7114	    bool seen_repl = false;
7115	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
7116				    counts, replacements, &seen_repl))
7117	      {
7118		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
7119		df_insn_rescan (insn);
7120	      }
7121	    else if (seen_repl)
7122	      {
7123		INSN_VAR_LOCATION_LOC (insn)
7124		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
7125					     NULL_RTX, replace_dead_reg,
7126					     replacements);
7127		df_insn_rescan (insn);
7128	      }
7129	  }
7130      free (replacements);
7131    }
7132
7133  if (dump_file && ndead)
7134    fprintf (dump_file, "Deleted %i trivially dead insns\n",
7135	     ndead);
7136  /* Clean up.  */
7137  free (counts);
7138  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7139  return ndead;
7140}
7141
7142/* If LOC contains references to NEWREG in a different mode, change them
7143   to use NEWREG instead.  */
7144
7145static void
7146cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
7147		    rtx *loc, rtx insn, rtx newreg)
7148{
7149  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
7150    {
7151      rtx *loc = *iter;
7152      rtx x = *loc;
7153      if (x
7154	  && REG_P (x)
7155	  && REGNO (x) == REGNO (newreg)
7156	  && GET_MODE (x) != GET_MODE (newreg))
7157	{
7158	  validate_change (insn, loc, newreg, 1);
7159	  iter.skip_subrtxes ();
7160	}
7161    }
7162}
7163
7164/* Change the mode of any reference to the register REGNO (NEWREG) to
7165   GET_MODE (NEWREG) in INSN.  */
7166
7167static void
7168cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
7169{
7170  int success;
7171
7172  if (!INSN_P (insn))
7173    return;
7174
7175  subrtx_ptr_iterator::array_type array;
7176  cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
7177  cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);
7178
7179  /* If the following assertion was triggered, there is most probably
7180     something wrong with the cc_modes_compatible back end function.
7181     CC modes only can be considered compatible if the insn - with the mode
7182     replaced by any of the compatible modes - can still be recognized.  */
7183  success = apply_change_group ();
7184  gcc_assert (success);
7185}
7186
7187/* Change the mode of any reference to the register REGNO (NEWREG) to
7188   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7189   any instruction which modifies NEWREG.  */
7190
7191static void
7192cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
7193{
7194  rtx_insn *insn;
7195
7196  for (insn = start; insn != end; insn = NEXT_INSN (insn))
7197    {
7198      if (! INSN_P (insn))
7199	continue;
7200
7201      if (reg_set_p (newreg, insn))
7202	return;
7203
7204      cse_change_cc_mode_insn (insn, newreg);
7205    }
7206}
7207
7208/* BB is a basic block which finishes with CC_REG as a condition code
7209   register which is set to CC_SRC.  Look through the successors of BB
7210   to find blocks which have a single predecessor (i.e., this one),
7211   and look through those blocks for an assignment to CC_REG which is
7212   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7213   permitted to change the mode of CC_SRC to a compatible mode.  This
7214   returns VOIDmode if no equivalent assignments were found.
7215   Otherwise it returns the mode which CC_SRC should wind up with.
7216   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7217   but is passed unmodified down to recursive calls in order to prevent
7218   endless recursion.
7219
7220   The main complexity in this function is handling the mode issues.
7221   We may have more than one duplicate which we can eliminate, and we
7222   try to find a mode which will work for multiple duplicates.  */
7223
7224static machine_mode
7225cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
7226	      bool can_change_mode)
7227{
7228  bool found_equiv;
7229  machine_mode mode;
7230  unsigned int insn_count;
7231  edge e;
7232  rtx_insn *insns[2];
7233  machine_mode modes[2];
7234  rtx_insn *last_insns[2];
7235  unsigned int i;
7236  rtx newreg;
7237  edge_iterator ei;
7238
7239  /* We expect to have two successors.  Look at both before picking
7240     the final mode for the comparison.  If we have more successors
7241     (i.e., some sort of table jump, although that seems unlikely),
7242     then we require all beyond the first two to use the same
7243     mode.  */
7244
7245  found_equiv = false;
7246  mode = GET_MODE (cc_src);
7247  insn_count = 0;
7248  FOR_EACH_EDGE (e, ei, bb->succs)
7249    {
7250      rtx_insn *insn;
7251      rtx_insn *end;
7252
7253      if (e->flags & EDGE_COMPLEX)
7254	continue;
7255
7256      if (EDGE_COUNT (e->dest->preds) != 1
7257	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
7258	  /* Avoid endless recursion on unreachable blocks.  */
7259	  || e->dest == orig_bb)
7260	continue;
7261
7262      end = NEXT_INSN (BB_END (e->dest));
7263      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7264	{
7265	  rtx set;
7266
7267	  if (! INSN_P (insn))
7268	    continue;
7269
7270	  /* If CC_SRC is modified, we have to stop looking for
7271	     something which uses it.  */
7272	  if (modified_in_p (cc_src, insn))
7273	    break;
7274
7275	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7276	  set = single_set (insn);
7277	  if (set
7278	      && REG_P (SET_DEST (set))
7279	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7280	    {
7281	      bool found;
7282	      machine_mode set_mode;
7283	      machine_mode comp_mode;
7284
7285	      found = false;
7286	      set_mode = GET_MODE (SET_SRC (set));
7287	      comp_mode = set_mode;
7288	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7289		found = true;
7290	      else if (GET_CODE (cc_src) == COMPARE
7291		       && GET_CODE (SET_SRC (set)) == COMPARE
7292		       && mode != set_mode
7293		       && rtx_equal_p (XEXP (cc_src, 0),
7294				       XEXP (SET_SRC (set), 0))
7295		       && rtx_equal_p (XEXP (cc_src, 1),
7296				       XEXP (SET_SRC (set), 1)))
7297
7298		{
7299		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7300		  if (comp_mode != VOIDmode
7301		      && (can_change_mode || comp_mode == mode))
7302		    found = true;
7303		}
7304
7305	      if (found)
7306		{
7307		  found_equiv = true;
7308		  if (insn_count < ARRAY_SIZE (insns))
7309		    {
7310		      insns[insn_count] = insn;
7311		      modes[insn_count] = set_mode;
7312		      last_insns[insn_count] = end;
7313		      ++insn_count;
7314
7315		      if (mode != comp_mode)
7316			{
7317			  gcc_assert (can_change_mode);
7318			  mode = comp_mode;
7319
7320			  /* The modified insn will be re-recognized later.  */
7321			  PUT_MODE (cc_src, mode);
7322			}
7323		    }
7324		  else
7325		    {
7326		      if (set_mode != mode)
7327			{
7328			  /* We found a matching expression in the
7329			     wrong mode, but we don't have room to
7330			     store it in the array.  Punt.  This case
7331			     should be rare.  */
7332			  break;
7333			}
7334		      /* INSN sets CC_REG to a value equal to CC_SRC
7335			 with the right mode.  We can simply delete
7336			 it.  */
7337		      delete_insn (insn);
7338		    }
7339
7340		  /* We found an instruction to delete.  Keep looking,
7341		     in the hopes of finding a three-way jump.  */
7342		  continue;
7343		}
7344
7345	      /* We found an instruction which sets the condition
7346		 code, so don't look any farther.  */
7347	      break;
7348	    }
7349
7350	  /* If INSN sets CC_REG in some other way, don't look any
7351	     farther.  */
7352	  if (reg_set_p (cc_reg, insn))
7353	    break;
7354	}
7355
7356      /* If we fell off the bottom of the block, we can keep looking
7357	 through successors.  We pass CAN_CHANGE_MODE as false because
7358	 we aren't prepared to handle compatibility between the
7359	 further blocks and this block.  */
7360      if (insn == end)
7361	{
7362	  machine_mode submode;
7363
7364	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7365	  if (submode != VOIDmode)
7366	    {
7367	      gcc_assert (submode == mode);
7368	      found_equiv = true;
7369	      can_change_mode = false;
7370	    }
7371	}
7372    }
7373
7374  if (! found_equiv)
7375    return VOIDmode;
7376
7377  /* Now INSN_COUNT is the number of instructions we found which set
7378     CC_REG to a value equivalent to CC_SRC.  The instructions are in
7379     INSNS.  The modes used by those instructions are in MODES.  */
7380
7381  newreg = NULL_RTX;
7382  for (i = 0; i < insn_count; ++i)
7383    {
7384      if (modes[i] != mode)
7385	{
7386	  /* We need to change the mode of CC_REG in INSNS[i] and
7387	     subsequent instructions.  */
7388	  if (! newreg)
7389	    {
7390	      if (GET_MODE (cc_reg) == mode)
7391		newreg = cc_reg;
7392	      else
7393		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7394	    }
7395	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7396				    newreg);
7397	}
7398
7399      delete_insn_and_edges (insns[i]);
7400    }
7401
7402  return mode;
7403}
7404
7405/* If we have a fixed condition code register (or two), walk through
7406   the instructions and try to eliminate duplicate assignments.  */
7407
7408static void
7409cse_condition_code_reg (void)
7410{
7411  unsigned int cc_regno_1;
7412  unsigned int cc_regno_2;
7413  rtx cc_reg_1;
7414  rtx cc_reg_2;
7415  basic_block bb;
7416
7417  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7418    return;
7419
7420  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7421  if (cc_regno_2 != INVALID_REGNUM)
7422    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7423  else
7424    cc_reg_2 = NULL_RTX;
7425
7426  FOR_EACH_BB_FN (bb, cfun)
7427    {
7428      rtx_insn *last_insn;
7429      rtx cc_reg;
7430      rtx_insn *insn;
7431      rtx_insn *cc_src_insn;
7432      rtx cc_src;
7433      machine_mode mode;
7434      machine_mode orig_mode;
7435
7436      /* Look for blocks which end with a conditional jump based on a
7437	 condition code register.  Then look for the instruction which
7438	 sets the condition code register.  Then look through the
7439	 successor blocks for instructions which set the condition
7440	 code register to the same value.  There are other possible
7441	 uses of the condition code register, but these are by far the
7442	 most common and the ones which we are most likely to be able
7443	 to optimize.  */
7444
7445      last_insn = BB_END (bb);
7446      if (!JUMP_P (last_insn))
7447	continue;
7448
7449      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7450	cc_reg = cc_reg_1;
7451      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7452	cc_reg = cc_reg_2;
7453      else
7454	continue;
7455
7456      cc_src_insn = NULL;
7457      cc_src = NULL_RTX;
7458      for (insn = PREV_INSN (last_insn);
7459	   insn && insn != PREV_INSN (BB_HEAD (bb));
7460	   insn = PREV_INSN (insn))
7461	{
7462	  rtx set;
7463
7464	  if (! INSN_P (insn))
7465	    continue;
7466	  set = single_set (insn);
7467	  if (set
7468	      && REG_P (SET_DEST (set))
7469	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7470	    {
7471	      cc_src_insn = insn;
7472	      cc_src = SET_SRC (set);
7473	      break;
7474	    }
7475	  else if (reg_set_p (cc_reg, insn))
7476	    break;
7477	}
7478
7479      if (! cc_src_insn)
7480	continue;
7481
7482      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7483	continue;
7484
7485      /* Now CC_REG is a condition code register used for a
7486	 conditional jump at the end of the block, and CC_SRC, in
7487	 CC_SRC_INSN, is the value to which that condition code
7488	 register is set, and CC_SRC is still meaningful at the end of
7489	 the basic block.  */
7490
7491      orig_mode = GET_MODE (cc_src);
7492      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7493      if (mode != VOIDmode)
7494	{
7495	  gcc_assert (mode == GET_MODE (cc_src));
7496	  if (mode != orig_mode)
7497	    {
7498	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7499
7500	      cse_change_cc_mode_insn (cc_src_insn, newreg);
7501
7502	      /* Do the same in the following insns that use the
7503		 current value of CC_REG within BB.  */
7504	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7505					NEXT_INSN (last_insn),
7506					newreg);
7507	    }
7508	}
7509    }
7510}
7511
7512
7513/* Perform common subexpression elimination.  Nonzero value from
7514   `cse_main' means that jumps were simplified and some code may now
7515   be unreachable, so do jump optimization again.  */
7516static unsigned int
7517rest_of_handle_cse (void)
7518{
7519  int tem;
7520
7521  if (dump_file)
7522    dump_flow_info (dump_file, dump_flags);
7523
7524  tem = cse_main (get_insns (), max_reg_num ());
7525
7526  /* If we are not running more CSE passes, then we are no longer
7527     expecting CSE to be run.  But always rerun it in a cheap mode.  */
7528  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7529
7530  if (tem == 2)
7531    {
7532      timevar_push (TV_JUMP);
7533      rebuild_jump_labels (get_insns ());
7534      cleanup_cfg (CLEANUP_CFG_CHANGED);
7535      timevar_pop (TV_JUMP);
7536    }
7537  else if (tem == 1 || optimize > 1)
7538    cleanup_cfg (0);
7539
7540  return 0;
7541}
7542
7543namespace {
7544
7545const pass_data pass_data_cse =
7546{
7547  RTL_PASS, /* type */
7548  "cse1", /* name */
7549  OPTGROUP_NONE, /* optinfo_flags */
7550  TV_CSE, /* tv_id */
7551  0, /* properties_required */
7552  0, /* properties_provided */
7553  0, /* properties_destroyed */
7554  0, /* todo_flags_start */
7555  TODO_df_finish, /* todo_flags_finish */
7556};
7557
7558class pass_cse : public rtl_opt_pass
7559{
7560public:
7561  pass_cse (gcc::context *ctxt)
7562    : rtl_opt_pass (pass_data_cse, ctxt)
7563  {}
7564
7565  /* opt_pass methods: */
7566  virtual bool gate (function *) { return optimize > 0; }
7567  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }
7568
7569}; // class pass_cse
7570
7571} // anon namespace
7572
7573rtl_opt_pass *
7574make_pass_cse (gcc::context *ctxt)
7575{
7576  return new pass_cse (ctxt);
7577}
7578
7579
7580/* Run second CSE pass after loop optimizations.  */
7581static unsigned int
7582rest_of_handle_cse2 (void)
7583{
7584  int tem;
7585
7586  if (dump_file)
7587    dump_flow_info (dump_file, dump_flags);
7588
7589  tem = cse_main (get_insns (), max_reg_num ());
7590
7591  /* Run a pass to eliminate duplicated assignments to condition code
7592     registers.  We have to run this after bypass_jumps, because it
7593     makes it harder for that pass to determine whether a jump can be
7594     bypassed safely.  */
7595  cse_condition_code_reg ();
7596
7597  delete_trivially_dead_insns (get_insns (), max_reg_num ());
7598
7599  if (tem == 2)
7600    {
7601      timevar_push (TV_JUMP);
7602      rebuild_jump_labels (get_insns ());
7603      cleanup_cfg (CLEANUP_CFG_CHANGED);
7604      timevar_pop (TV_JUMP);
7605    }
7606  else if (tem == 1)
7607    cleanup_cfg (0);
7608
7609  cse_not_expected = 1;
7610  return 0;
7611}
7612
7613
7614namespace {
7615
7616const pass_data pass_data_cse2 =
7617{
7618  RTL_PASS, /* type */
7619  "cse2", /* name */
7620  OPTGROUP_NONE, /* optinfo_flags */
7621  TV_CSE2, /* tv_id */
7622  0, /* properties_required */
7623  0, /* properties_provided */
7624  0, /* properties_destroyed */
7625  0, /* todo_flags_start */
7626  TODO_df_finish, /* todo_flags_finish */
7627};
7628
7629class pass_cse2 : public rtl_opt_pass
7630{
7631public:
7632  pass_cse2 (gcc::context *ctxt)
7633    : rtl_opt_pass (pass_data_cse2, ctxt)
7634  {}
7635
7636  /* opt_pass methods: */
7637  virtual bool gate (function *)
7638    {
7639      return optimize > 0 && flag_rerun_cse_after_loop;
7640    }
7641
7642  virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }
7643
7644}; // class pass_cse2
7645
7646} // anon namespace
7647
7648rtl_opt_pass *
7649make_pass_cse2 (gcc::context *ctxt)
7650{
7651  return new pass_cse2 (ctxt);
7652}
7653
7654/* Run second CSE pass after loop optimizations.  */
7655static unsigned int
7656rest_of_handle_cse_after_global_opts (void)
7657{
7658  int save_cfj;
7659  int tem;
7660
7661  /* We only want to do local CSE, so don't follow jumps.  */
7662  save_cfj = flag_cse_follow_jumps;
7663  flag_cse_follow_jumps = 0;
7664
7665  rebuild_jump_labels (get_insns ());
7666  tem = cse_main (get_insns (), max_reg_num ());
7667  purge_all_dead_edges ();
7668  delete_trivially_dead_insns (get_insns (), max_reg_num ());
7669
7670  cse_not_expected = !flag_rerun_cse_after_loop;
7671
7672  /* If cse altered any jumps, rerun jump opts to clean things up.  */
7673  if (tem == 2)
7674    {
7675      timevar_push (TV_JUMP);
7676      rebuild_jump_labels (get_insns ());
7677      cleanup_cfg (CLEANUP_CFG_CHANGED);
7678      timevar_pop (TV_JUMP);
7679    }
7680  else if (tem == 1)
7681    cleanup_cfg (0);
7682
7683  flag_cse_follow_jumps = save_cfj;
7684  return 0;
7685}
7686
7687namespace {
7688
7689const pass_data pass_data_cse_after_global_opts =
7690{
7691  RTL_PASS, /* type */
7692  "cse_local", /* name */
7693  OPTGROUP_NONE, /* optinfo_flags */
7694  TV_CSE, /* tv_id */
7695  0, /* properties_required */
7696  0, /* properties_provided */
7697  0, /* properties_destroyed */
7698  0, /* todo_flags_start */
7699  TODO_df_finish, /* todo_flags_finish */
7700};
7701
7702class pass_cse_after_global_opts : public rtl_opt_pass
7703{
7704public:
7705  pass_cse_after_global_opts (gcc::context *ctxt)
7706    : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
7707  {}
7708
7709  /* opt_pass methods: */
7710  virtual bool gate (function *)
7711    {
7712      return optimize > 0 && flag_rerun_cse_after_global_opts;
7713    }
7714
7715  virtual unsigned int execute (function *)
7716    {
7717      return rest_of_handle_cse_after_global_opts ();
7718    }
7719
7720}; // class pass_cse_after_global_opts
7721
7722} // anon namespace
7723
7724rtl_opt_pass *
7725make_pass_cse_after_global_opts (gcc::context *ctxt)
7726{
7727  return new pass_cse_after_global_opts (ctxt);
7728}
7729