1/* SCC value numbering for trees
2   Copyright (C) 2006-2015 Free Software Foundation, Inc.
3   Contributed by Daniel Berlin <dan@dberlin.org>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "hash-set.h"
26#include "machmode.h"
27#include "vec.h"
28#include "double-int.h"
29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "wide-int.h"
33#include "inchash.h"
34#include "tree.h"
35#include "fold-const.h"
36#include "stor-layout.h"
37#include "predict.h"
38#include "hard-reg-set.h"
39#include "function.h"
40#include "dominance.h"
41#include "cfg.h"
42#include "cfganal.h"
43#include "basic-block.h"
44#include "gimple-pretty-print.h"
45#include "tree-inline.h"
46#include "hash-table.h"
47#include "tree-ssa-alias.h"
48#include "internal-fn.h"
49#include "gimple-fold.h"
50#include "tree-eh.h"
51#include "gimple-expr.h"
52#include "is-a.h"
53#include "gimple.h"
54#include "gimplify.h"
55#include "gimple-ssa.h"
56#include "tree-phinodes.h"
57#include "ssa-iterators.h"
58#include "stringpool.h"
59#include "tree-ssanames.h"
60#include "hashtab.h"
61#include "rtl.h"
62#include "flags.h"
63#include "statistics.h"
64#include "real.h"
65#include "fixed-value.h"
66#include "insn-config.h"
67#include "expmed.h"
68#include "dojump.h"
69#include "explow.h"
70#include "calls.h"
71#include "emit-rtl.h"
72#include "varasm.h"
73#include "stmt.h"
74#include "expr.h"
75#include "tree-dfa.h"
76#include "tree-ssa.h"
77#include "dumpfile.h"
78#include "alloc-pool.h"
79#include "cfgloop.h"
80#include "params.h"
81#include "tree-ssa-propagate.h"
82#include "tree-ssa-sccvn.h"
83#include "tree-cfg.h"
84#include "domwalk.h"
85#include "ipa-ref.h"
86#include "plugin-api.h"
87#include "cgraph.h"
88
89/* This algorithm is based on the SCC algorithm presented by Keith
90   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
91   (http://citeseer.ist.psu.edu/41805.html).  In
92   straight line code, it is equivalent to a regular hash based value
93   numbering that is performed in reverse postorder.
94
95   For code with cycles, there are two alternatives, both of which
96   require keeping the hashtables separate from the actual list of
97   value numbers for SSA names.
98
99   1. Iterate value numbering in an RPO walk of the blocks, removing
100   all the entries from the hashtable after each iteration (but
101   keeping the SSA name->value number mapping between iterations).
102   Iterate until it does not change.
103
104   2. Perform value numbering as part of an SCC walk on the SSA graph,
105   iterating only the cycles in the SSA graph until they do not change
106   (using a separate, optimistic hashtable for value numbering the SCC
107   operands).
108
109   The second is not just faster in practice (because most SSA graph
110   cycles do not involve all the variables in the graph), it also has
111   some nice properties.
112
113   One of these nice properties is that when we pop an SCC off the
114   stack, we are guaranteed to have processed all the operands coming from
115   *outside of that SCC*, so we do not need to do anything special to
116   ensure they have value numbers.
117
118   Another nice property is that the SCC walk is done as part of a DFS
119   of the SSA graph, which makes it easy to perform combining and
120   simplifying operations at the same time.
121
122   The code below is deliberately written in a way that makes it easy
123   to separate the SCC walk from the other work it does.
124
125   In order to propagate constants through the code, we track which
126   expressions contain constants, and use those while folding.  In
127   theory, we could also track expressions whose value numbers are
128   replaced, in case we end up folding based on expression
129   identities.
130
131   In order to value number memory, we assign value numbers to vuses.
132   This enables us to note that, for example, stores to the same
133   address of the same value from the same starting memory states are
134   equivalent.
135   TODO:
136
137   1. We can iterate only the changing portions of the SCC's, but
138   I have not seen an SCC big enough for this to be a win.
139   2. If you differentiate between phi nodes for loops and phi nodes
140   for if-then-else, you can properly consider phi nodes in different
141   blocks for equivalence.
142   3. We could value number vuses in more cases, particularly, whole
143   structure copies.
144*/
145
146
147/* vn_nary_op hashtable helpers.  */
148
149struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
150{
151  typedef vn_nary_op_s value_type;
152  typedef vn_nary_op_s compare_type;
153  static inline hashval_t hash (const value_type *);
154  static inline bool equal (const value_type *, const compare_type *);
155};
156
157/* Return the computed hashcode for nary operation P1.  */
158
159inline hashval_t
160vn_nary_op_hasher::hash (const value_type *vno1)
161{
162  return vno1->hashcode;
163}
164
165/* Compare nary operations P1 and P2 and return true if they are
166   equivalent.  */
167
168inline bool
169vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
170{
171  return vn_nary_op_eq (vno1, vno2);
172}
173
174typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
175typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
176
177
178/* vn_phi hashtable helpers.  */
179
180static int
181vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
182
183struct vn_phi_hasher
184{
185  typedef vn_phi_s value_type;
186  typedef vn_phi_s compare_type;
187  static inline hashval_t hash (const value_type *);
188  static inline bool equal (const value_type *, const compare_type *);
189  static inline void remove (value_type *);
190};
191
192/* Return the computed hashcode for phi operation P1.  */
193
194inline hashval_t
195vn_phi_hasher::hash (const value_type *vp1)
196{
197  return vp1->hashcode;
198}
199
200/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
201
202inline bool
203vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
204{
205  return vn_phi_eq (vp1, vp2);
206}
207
208/* Free a phi operation structure VP.  */
209
210inline void
211vn_phi_hasher::remove (value_type *phi)
212{
213  phi->phiargs.release ();
214}
215
216typedef hash_table<vn_phi_hasher> vn_phi_table_type;
217typedef vn_phi_table_type::iterator vn_phi_iterator_type;
218
219
220/* Compare two reference operands P1 and P2 for equality.  Return true if
221   they are equal, and false otherwise.  */
222
223static int
224vn_reference_op_eq (const void *p1, const void *p2)
225{
226  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
227  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
228
229  return (vro1->opcode == vro2->opcode
230	  /* We do not care for differences in type qualification.  */
231	  && (vro1->type == vro2->type
232	      || (vro1->type && vro2->type
233		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
234					 TYPE_MAIN_VARIANT (vro2->type))))
235	  && expressions_equal_p (vro1->op0, vro2->op0)
236	  && expressions_equal_p (vro1->op1, vro2->op1)
237	  && expressions_equal_p (vro1->op2, vro2->op2));
238}
239
240/* Free a reference operation structure VP.  */
241
242static inline void
243free_reference (vn_reference_s *vr)
244{
245  vr->operands.release ();
246}
247
248
249/* vn_reference hashtable helpers.  */
250
251struct vn_reference_hasher
252{
253  typedef vn_reference_s value_type;
254  typedef vn_reference_s compare_type;
255  static inline hashval_t hash (const value_type *);
256  static inline bool equal (const value_type *, const compare_type *);
257  static inline void remove (value_type *);
258};
259
260/* Return the hashcode for a given reference operation P1.  */
261
262inline hashval_t
263vn_reference_hasher::hash (const value_type *vr1)
264{
265  return vr1->hashcode;
266}
267
268inline bool
269vn_reference_hasher::equal (const value_type *v, const compare_type *c)
270{
271  return vn_reference_eq (v, c);
272}
273
274inline void
275vn_reference_hasher::remove (value_type *v)
276{
277  free_reference (v);
278}
279
280typedef hash_table<vn_reference_hasher> vn_reference_table_type;
281typedef vn_reference_table_type::iterator vn_reference_iterator_type;
282
283
284/* The set of hashtables and alloc_pool's for their items.  */
285
286typedef struct vn_tables_s
287{
288  vn_nary_op_table_type *nary;
289  vn_phi_table_type *phis;
290  vn_reference_table_type *references;
291  struct obstack nary_obstack;
292  alloc_pool phis_pool;
293  alloc_pool references_pool;
294} *vn_tables_t;
295
296
297/* vn_constant hashtable helpers.  */
298
299struct vn_constant_hasher : typed_free_remove <vn_constant_s>
300{
301  typedef vn_constant_s value_type;
302  typedef vn_constant_s compare_type;
303  static inline hashval_t hash (const value_type *);
304  static inline bool equal (const value_type *, const compare_type *);
305};
306
307/* Hash table hash function for vn_constant_t.  */
308
309inline hashval_t
310vn_constant_hasher::hash (const value_type *vc1)
311{
312  return vc1->hashcode;
313}
314
315/* Hash table equality function for vn_constant_t.  */
316
317inline bool
318vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
319{
320  if (vc1->hashcode != vc2->hashcode)
321    return false;
322
323  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
324}
325
326static hash_table<vn_constant_hasher> *constant_to_value_id;
327static bitmap constant_value_ids;
328
329
330/* Valid hashtables storing information we have proven to be
331   correct.  */
332
333static vn_tables_t valid_info;
334
335/* Optimistic hashtables storing information we are making assumptions about
336   during iterations.  */
337
338static vn_tables_t optimistic_info;
339
340/* Pointer to the set of hashtables that is currently being used.
341   Should always point to either the optimistic_info, or the
342   valid_info.  */
343
344static vn_tables_t current_info;
345
346
347/* Reverse post order index for each basic block.  */
348
349static int *rpo_numbers;
350
351#define SSA_VAL(x) (VN_INFO ((x))->valnum)
352
353/* Return the SSA value of the VUSE x, supporting released VDEFs
354   during elimination which will value-number the VDEF to the
355   associated VUSE (but not substitute in the whole lattice).  */
356
357static inline tree
358vuse_ssa_val (tree x)
359{
360  if (!x)
361    return NULL_TREE;
362
363  do
364    {
365      x = SSA_VAL (x);
366    }
367  while (SSA_NAME_IN_FREE_LIST (x));
368
369  return x;
370}
371
372/* This represents the top of the VN lattice, which is the universal
373   value.  */
374
375tree VN_TOP;
376
377/* Unique counter for our value ids.  */
378
379static unsigned int next_value_id;
380
381/* Next DFS number and the stack for strongly connected component
382   detection. */
383
384static unsigned int next_dfs_num;
385static vec<tree> sccstack;
386
387
388
389/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
390   are allocated on an obstack for locality reasons, and to free them
391   without looping over the vec.  */
392
393static vec<vn_ssa_aux_t> vn_ssa_aux_table;
394static struct obstack vn_ssa_aux_obstack;
395
396/* Return the value numbering information for a given SSA name.  */
397
398vn_ssa_aux_t
399VN_INFO (tree name)
400{
401  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
402  gcc_checking_assert (res);
403  return res;
404}
405
406/* Set the value numbering info for a given SSA name to a given
407   value.  */
408
409static inline void
410VN_INFO_SET (tree name, vn_ssa_aux_t value)
411{
412  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413}
414
415/* Initialize the value numbering info for a given SSA name.
416   This should be called just once for every SSA name.  */
417
418vn_ssa_aux_t
419VN_INFO_GET (tree name)
420{
421  vn_ssa_aux_t newinfo;
422
423  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
427  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428  return newinfo;
429}
430
431
432/* Get the representative expression for the SSA_NAME NAME.  Returns
433   the representative SSA_NAME if there is no expression associated with it.  */
434
435tree
436vn_get_expr_for (tree name)
437{
438  vn_ssa_aux_t vn = VN_INFO (name);
439  gimple def_stmt;
440  tree expr = NULL_TREE;
441  enum tree_code code;
442
443  if (vn->valnum == VN_TOP)
444    return name;
445
446  /* If the value-number is a constant it is the representative
447     expression.  */
448  if (TREE_CODE (vn->valnum) != SSA_NAME)
449    return vn->valnum;
450
451  /* Get to the information of the value of this SSA_NAME.  */
452  vn = VN_INFO (vn->valnum);
453
454  /* If the value-number is a constant it is the representative
455     expression.  */
456  if (TREE_CODE (vn->valnum) != SSA_NAME)
457    return vn->valnum;
458
459  /* Else if we have an expression, return it.  */
460  if (vn->expr != NULL_TREE)
461    return vn->expr;
462
463  /* Otherwise use the defining statement to build the expression.  */
464  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
465
466  /* If the value number is not an assignment use it directly.  */
467  if (!is_gimple_assign (def_stmt))
468    return vn->valnum;
469
470  /* Note that we can valueize here because we clear the cached
471     simplified expressions after each optimistic iteration.  */
472  code = gimple_assign_rhs_code (def_stmt);
473  switch (TREE_CODE_CLASS (code))
474    {
475    case tcc_reference:
476      if ((code == REALPART_EXPR
477	   || code == IMAGPART_EXPR
478	   || code == VIEW_CONVERT_EXPR)
479	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
480				      0)) == SSA_NAME)
481	expr = fold_build1 (code,
482			    gimple_expr_type (def_stmt),
483			    vn_valueize (TREE_OPERAND
484					   (gimple_assign_rhs1 (def_stmt), 0)));
485      break;
486
487    case tcc_unary:
488      expr = fold_build1 (code,
489			  gimple_expr_type (def_stmt),
490			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
491      break;
492
493    case tcc_binary:
494      expr = fold_build2 (code,
495			  gimple_expr_type (def_stmt),
496			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
497			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
498      break;
499
500    case tcc_exceptional:
501      if (code == CONSTRUCTOR
502	  && TREE_CODE
503	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
504	expr = gimple_assign_rhs1 (def_stmt);
505      break;
506
507    default:;
508    }
509  if (expr == NULL_TREE)
510    return vn->valnum;
511
512  /* Cache the expression.  */
513  vn->expr = expr;
514
515  return expr;
516}
517
518/* Return the vn_kind the expression computed by the stmt should be
519   associated with.  */
520
521enum vn_kind
522vn_get_stmt_kind (gimple stmt)
523{
524  switch (gimple_code (stmt))
525    {
526    case GIMPLE_CALL:
527      return VN_REFERENCE;
528    case GIMPLE_PHI:
529      return VN_PHI;
530    case GIMPLE_ASSIGN:
531      {
532	enum tree_code code = gimple_assign_rhs_code (stmt);
533	tree rhs1 = gimple_assign_rhs1 (stmt);
534	switch (get_gimple_rhs_class (code))
535	  {
536	  case GIMPLE_UNARY_RHS:
537	  case GIMPLE_BINARY_RHS:
538	  case GIMPLE_TERNARY_RHS:
539	    return VN_NARY;
540	  case GIMPLE_SINGLE_RHS:
541	    switch (TREE_CODE_CLASS (code))
542	      {
543	      case tcc_reference:
544		/* VOP-less references can go through unary case.  */
545		if ((code == REALPART_EXPR
546		     || code == IMAGPART_EXPR
547		     || code == VIEW_CONVERT_EXPR
548		     || code == BIT_FIELD_REF)
549		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
550		  return VN_NARY;
551
552		/* Fallthrough.  */
553	      case tcc_declaration:
554		return VN_REFERENCE;
555
556	      case tcc_constant:
557		return VN_CONSTANT;
558
559	      default:
560		if (code == ADDR_EXPR)
561		  return (is_gimple_min_invariant (rhs1)
562			  ? VN_CONSTANT : VN_REFERENCE);
563		else if (code == CONSTRUCTOR)
564		  return VN_NARY;
565		return VN_NONE;
566	      }
567	  default:
568	    return VN_NONE;
569	  }
570      }
571    default:
572      return VN_NONE;
573    }
574}
575
576/* Lookup a value id for CONSTANT and return it.  If it does not
577   exist returns 0.  */
578
579unsigned int
580get_constant_value_id (tree constant)
581{
582  vn_constant_s **slot;
583  struct vn_constant_s vc;
584
585  vc.hashcode = vn_hash_constant_with_type (constant);
586  vc.constant = constant;
587  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
588  if (slot)
589    return (*slot)->value_id;
590  return 0;
591}
592
593/* Lookup a value id for CONSTANT, and if it does not exist, create a
594   new one and return it.  If it does exist, return it.  */
595
596unsigned int
597get_or_alloc_constant_value_id (tree constant)
598{
599  vn_constant_s **slot;
600  struct vn_constant_s vc;
601  vn_constant_t vcp;
602
603  vc.hashcode = vn_hash_constant_with_type (constant);
604  vc.constant = constant;
605  slot = constant_to_value_id->find_slot (&vc, INSERT);
606  if (*slot)
607    return (*slot)->value_id;
608
609  vcp = XNEW (struct vn_constant_s);
610  vcp->hashcode = vc.hashcode;
611  vcp->constant = constant;
612  vcp->value_id = get_next_value_id ();
613  *slot = vcp;
614  bitmap_set_bit (constant_value_ids, vcp->value_id);
615  return vcp->value_id;
616}
617
618/* Return true if V is a value id for a constant.  */
619
620bool
621value_id_constant_p (unsigned int v)
622{
623  return bitmap_bit_p (constant_value_ids, v);
624}
625
626/* Compute the hash for a reference operand VRO1.  */
627
628static void
629vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
630{
631  hstate.add_int (vro1->opcode);
632  if (vro1->op0)
633    inchash::add_expr (vro1->op0, hstate);
634  if (vro1->op1)
635    inchash::add_expr (vro1->op1, hstate);
636  if (vro1->op2)
637    inchash::add_expr (vro1->op2, hstate);
638}
639
640/* Compute a hash for the reference operation VR1 and return it.  */
641
642static hashval_t
643vn_reference_compute_hash (const vn_reference_t vr1)
644{
645  inchash::hash hstate;
646  hashval_t result;
647  int i;
648  vn_reference_op_t vro;
649  HOST_WIDE_INT off = -1;
650  bool deref = false;
651
652  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
653    {
654      if (vro->opcode == MEM_REF)
655	deref = true;
656      else if (vro->opcode != ADDR_EXPR)
657	deref = false;
658      if (vro->off != -1)
659	{
660	  if (off == -1)
661	    off = 0;
662	  off += vro->off;
663	}
664      else
665	{
666	  if (off != -1
667	      && off != 0)
668	    hstate.add_int (off);
669	  off = -1;
670	  if (deref
671	      && vro->opcode == ADDR_EXPR)
672	    {
673	      if (vro->op0)
674		{
675		  tree op = TREE_OPERAND (vro->op0, 0);
676		  hstate.add_int (TREE_CODE (op));
677		  inchash::add_expr (op, hstate);
678		}
679	    }
680	  else
681	    vn_reference_op_compute_hash (vro, hstate);
682	}
683    }
684  result = hstate.end ();
685  /* ??? We would ICE later if we hash instead of adding that in. */
686  if (vr1->vuse)
687    result += SSA_NAME_VERSION (vr1->vuse);
688
689  return result;
690}
691
692/* Return true if reference operations VR1 and VR2 are equivalent.  This
693   means they have the same set of operands and vuses.  */
694
695bool
696vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
697{
698  unsigned i, j;
699
700  /* Early out if this is not a hash collision.  */
701  if (vr1->hashcode != vr2->hashcode)
702    return false;
703
704  /* The VOP needs to be the same.  */
705  if (vr1->vuse != vr2->vuse)
706    return false;
707
708  /* If the operands are the same we are done.  */
709  if (vr1->operands == vr2->operands)
710    return true;
711
712  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
713    return false;
714
715  if (INTEGRAL_TYPE_P (vr1->type)
716      && INTEGRAL_TYPE_P (vr2->type))
717    {
718      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
719	return false;
720    }
721  else if (INTEGRAL_TYPE_P (vr1->type)
722	   && (TYPE_PRECISION (vr1->type)
723	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
724    return false;
725  else if (INTEGRAL_TYPE_P (vr2->type)
726	   && (TYPE_PRECISION (vr2->type)
727	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
728    return false;
729
730  i = 0;
731  j = 0;
732  do
733    {
734      HOST_WIDE_INT off1 = 0, off2 = 0;
735      vn_reference_op_t vro1, vro2;
736      vn_reference_op_s tem1, tem2;
737      bool deref1 = false, deref2 = false;
738      for (; vr1->operands.iterate (i, &vro1); i++)
739	{
740	  if (vro1->opcode == MEM_REF)
741	    deref1 = true;
742	  if (vro1->off == -1)
743	    break;
744	  off1 += vro1->off;
745	}
746      for (; vr2->operands.iterate (j, &vro2); j++)
747	{
748	  if (vro2->opcode == MEM_REF)
749	    deref2 = true;
750	  if (vro2->off == -1)
751	    break;
752	  off2 += vro2->off;
753	}
754      if (off1 != off2)
755	return false;
756      if (deref1 && vro1->opcode == ADDR_EXPR)
757	{
758	  memset (&tem1, 0, sizeof (tem1));
759	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
760	  tem1.type = TREE_TYPE (tem1.op0);
761	  tem1.opcode = TREE_CODE (tem1.op0);
762	  vro1 = &tem1;
763	  deref1 = false;
764	}
765      if (deref2 && vro2->opcode == ADDR_EXPR)
766	{
767	  memset (&tem2, 0, sizeof (tem2));
768	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
769	  tem2.type = TREE_TYPE (tem2.op0);
770	  tem2.opcode = TREE_CODE (tem2.op0);
771	  vro2 = &tem2;
772	  deref2 = false;
773	}
774      if (deref1 != deref2)
775	return false;
776      if (!vn_reference_op_eq (vro1, vro2))
777	return false;
778      ++j;
779      ++i;
780    }
781  while (vr1->operands.length () != i
782	 || vr2->operands.length () != j);
783
784  return true;
785}
786
787/* Copy the operations present in load/store REF into RESULT, a vector of
788   vn_reference_op_s's.  */
789
790static void
791copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
792{
793  if (TREE_CODE (ref) == TARGET_MEM_REF)
794    {
795      vn_reference_op_s temp;
796
797      result->reserve (3);
798
799      memset (&temp, 0, sizeof (temp));
800      temp.type = TREE_TYPE (ref);
801      temp.opcode = TREE_CODE (ref);
802      temp.op0 = TMR_INDEX (ref);
803      temp.op1 = TMR_STEP (ref);
804      temp.op2 = TMR_OFFSET (ref);
805      temp.off = -1;
806      result->quick_push (temp);
807
808      memset (&temp, 0, sizeof (temp));
809      temp.type = NULL_TREE;
810      temp.opcode = ERROR_MARK;
811      temp.op0 = TMR_INDEX2 (ref);
812      temp.off = -1;
813      result->quick_push (temp);
814
815      memset (&temp, 0, sizeof (temp));
816      temp.type = NULL_TREE;
817      temp.opcode = TREE_CODE (TMR_BASE (ref));
818      temp.op0 = TMR_BASE (ref);
819      temp.off = -1;
820      result->quick_push (temp);
821      return;
822    }
823
824  /* For non-calls, store the information that makes up the address.  */
825  tree orig = ref;
826  while (ref)
827    {
828      vn_reference_op_s temp;
829
830      memset (&temp, 0, sizeof (temp));
831      temp.type = TREE_TYPE (ref);
832      temp.opcode = TREE_CODE (ref);
833      temp.off = -1;
834
835      switch (temp.opcode)
836	{
837	case MODIFY_EXPR:
838	  temp.op0 = TREE_OPERAND (ref, 1);
839	  break;
840	case WITH_SIZE_EXPR:
841	  temp.op0 = TREE_OPERAND (ref, 1);
842	  temp.off = 0;
843	  break;
844	case MEM_REF:
845	  /* The base address gets its own vn_reference_op_s structure.  */
846	  temp.op0 = TREE_OPERAND (ref, 1);
847	    {
848	      offset_int off = mem_ref_offset (ref);
849	      if (wi::fits_shwi_p (off))
850		temp.off = off.to_shwi ();
851	    }
852	  break;
853	case BIT_FIELD_REF:
854	  /* Record bits and position.  */
855	  temp.op0 = TREE_OPERAND (ref, 1);
856	  temp.op1 = TREE_OPERAND (ref, 2);
857	  break;
858	case COMPONENT_REF:
859	  /* The field decl is enough to unambiguously specify the field,
860	     a matching type is not necessary and a mismatching type
861	     is always a spurious difference.  */
862	  temp.type = NULL_TREE;
863	  temp.op0 = TREE_OPERAND (ref, 1);
864	  temp.op1 = TREE_OPERAND (ref, 2);
865	  {
866	    tree this_offset = component_ref_field_offset (ref);
867	    if (this_offset
868		&& TREE_CODE (this_offset) == INTEGER_CST)
869	      {
870		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
871		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
872		  {
873		    offset_int off
874		      = (wi::to_offset (this_offset)
875			 + wi::lrshift (wi::to_offset (bit_offset),
876					LOG2_BITS_PER_UNIT));
877		    if (wi::fits_shwi_p (off)
878			/* Probibit value-numbering zero offset components
879			   of addresses the same before the pass folding
880			   __builtin_object_size had a chance to run
881			   (checking cfun->after_inlining does the
882			   trick here).  */
883			&& (TREE_CODE (orig) != ADDR_EXPR
884			    || off != 0
885			    || cfun->after_inlining))
886		      temp.off = off.to_shwi ();
887		  }
888	      }
889	  }
890	  break;
891	case ARRAY_RANGE_REF:
892	case ARRAY_REF:
893	  /* Record index as operand.  */
894	  temp.op0 = TREE_OPERAND (ref, 1);
895	  /* Always record lower bounds and element size.  */
896	  temp.op1 = array_ref_low_bound (ref);
897	  temp.op2 = array_ref_element_size (ref);
898	  if (TREE_CODE (temp.op0) == INTEGER_CST
899	      && TREE_CODE (temp.op1) == INTEGER_CST
900	      && TREE_CODE (temp.op2) == INTEGER_CST)
901	    {
902	      offset_int off = ((wi::to_offset (temp.op0)
903				 - wi::to_offset (temp.op1))
904				* wi::to_offset (temp.op2));
905	      if (wi::fits_shwi_p (off))
906		temp.off = off.to_shwi();
907	    }
908	  break;
909	case VAR_DECL:
910	  if (DECL_HARD_REGISTER (ref))
911	    {
912	      temp.op0 = ref;
913	      break;
914	    }
915	  /* Fallthru.  */
916	case PARM_DECL:
917	case CONST_DECL:
918	case RESULT_DECL:
919	  /* Canonicalize decls to MEM[&decl] which is what we end up with
920	     when valueizing MEM[ptr] with ptr = &decl.  */
921	  temp.opcode = MEM_REF;
922	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
923	  temp.off = 0;
924	  result->safe_push (temp);
925	  temp.opcode = ADDR_EXPR;
926	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
927	  temp.type = TREE_TYPE (temp.op0);
928	  temp.off = -1;
929	  break;
930	case STRING_CST:
931	case INTEGER_CST:
932	case COMPLEX_CST:
933	case VECTOR_CST:
934	case REAL_CST:
935	case FIXED_CST:
936	case CONSTRUCTOR:
937	case SSA_NAME:
938	  temp.op0 = ref;
939	  break;
940	case ADDR_EXPR:
941	  if (is_gimple_min_invariant (ref))
942	    {
943	      temp.op0 = ref;
944	      break;
945	    }
946	  break;
947	  /* These are only interesting for their operands, their
948	     existence, and their type.  They will never be the last
949	     ref in the chain of references (IE they require an
950	     operand), so we don't have to put anything
951	     for op* as it will be handled by the iteration  */
952	case REALPART_EXPR:
953	case VIEW_CONVERT_EXPR:
954	  temp.off = 0;
955	  break;
956	case IMAGPART_EXPR:
957	  /* This is only interesting for its constant offset.  */
958	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
959	  break;
960	default:
961	  gcc_unreachable ();
962	}
963      result->safe_push (temp);
964
965      if (REFERENCE_CLASS_P (ref)
966	  || TREE_CODE (ref) == MODIFY_EXPR
967	  || TREE_CODE (ref) == WITH_SIZE_EXPR
968	  || (TREE_CODE (ref) == ADDR_EXPR
969	      && !is_gimple_min_invariant (ref)))
970	ref = TREE_OPERAND (ref, 0);
971      else
972	ref = NULL_TREE;
973    }
974}
975
976/* Build a alias-oracle reference abstraction in *REF from the vn_reference
977   operands in *OPS, the reference alias set SET and the reference type TYPE.
978   Return true if something useful was produced.  */
979
980bool
981ao_ref_init_from_vn_reference (ao_ref *ref,
982			       alias_set_type set, tree type,
983			       vec<vn_reference_op_s> ops)
984{
985  vn_reference_op_t op;
986  unsigned i;
987  tree base = NULL_TREE;
988  tree *op0_p = &base;
989  HOST_WIDE_INT offset = 0;
990  HOST_WIDE_INT max_size;
991  HOST_WIDE_INT size = -1;
992  tree size_tree = NULL_TREE;
993  alias_set_type base_alias_set = -1;
994
995  /* First get the final access size from just the outermost expression.  */
996  op = &ops[0];
997  if (op->opcode == COMPONENT_REF)
998    size_tree = DECL_SIZE (op->op0);
999  else if (op->opcode == BIT_FIELD_REF)
1000    size_tree = op->op0;
1001  else
1002    {
1003      machine_mode mode = TYPE_MODE (type);
1004      if (mode == BLKmode)
1005	size_tree = TYPE_SIZE (type);
1006      else
1007        size = GET_MODE_BITSIZE (mode);
1008    }
1009  if (size_tree != NULL_TREE)
1010    {
1011      if (!tree_fits_uhwi_p (size_tree))
1012	size = -1;
1013      else
1014	size = tree_to_uhwi (size_tree);
1015    }
1016
1017  /* Initially, maxsize is the same as the accessed element size.
1018     In the following it will only grow (or become -1).  */
1019  max_size = size;
1020
1021  /* Compute cumulative bit-offset for nested component-refs and array-refs,
1022     and find the ultimate containing object.  */
1023  FOR_EACH_VEC_ELT (ops, i, op)
1024    {
1025      switch (op->opcode)
1026	{
1027	/* These may be in the reference ops, but we cannot do anything
1028	   sensible with them here.  */
1029	case ADDR_EXPR:
1030	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
1031	  if (base != NULL_TREE
1032	      && TREE_CODE (base) == MEM_REF
1033	      && op->op0
1034	      && DECL_P (TREE_OPERAND (op->op0, 0)))
1035	    {
1036	      vn_reference_op_t pop = &ops[i-1];
1037	      base = TREE_OPERAND (op->op0, 0);
1038	      if (pop->off == -1)
1039		{
1040		  max_size = -1;
1041		  offset = 0;
1042		}
1043	      else
1044		offset += pop->off * BITS_PER_UNIT;
1045	      op0_p = NULL;
1046	      break;
1047	    }
1048	  /* Fallthru.  */
1049	case CALL_EXPR:
1050	  return false;
1051
1052	/* Record the base objects.  */
1053	case MEM_REF:
1054	  base_alias_set = get_deref_alias_set (op->op0);
1055	  *op0_p = build2 (MEM_REF, op->type,
1056			   NULL_TREE, op->op0);
1057	  op0_p = &TREE_OPERAND (*op0_p, 0);
1058	  break;
1059
1060	case VAR_DECL:
1061	case PARM_DECL:
1062	case RESULT_DECL:
1063	case SSA_NAME:
1064	  *op0_p = op->op0;
1065	  op0_p = NULL;
1066	  break;
1067
1068	/* And now the usual component-reference style ops.  */
1069	case BIT_FIELD_REF:
1070	  offset += tree_to_shwi (op->op1);
1071	  break;
1072
1073	case COMPONENT_REF:
1074	  {
1075	    tree field = op->op0;
1076	    /* We do not have a complete COMPONENT_REF tree here so we
1077	       cannot use component_ref_field_offset.  Do the interesting
1078	       parts manually.  */
1079
1080	    if (op->op1
1081		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1082	      max_size = -1;
1083	    else
1084	      {
1085		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1086			   * BITS_PER_UNIT);
1087		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1088	      }
1089	    break;
1090	  }
1091
1092	case ARRAY_RANGE_REF:
1093	case ARRAY_REF:
1094	  /* We recorded the lower bound and the element size.  */
1095	  if (!tree_fits_shwi_p (op->op0)
1096	      || !tree_fits_shwi_p (op->op1)
1097	      || !tree_fits_shwi_p (op->op2))
1098	    max_size = -1;
1099	  else
1100	    {
1101	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
1102	      hindex -= tree_to_shwi (op->op1);
1103	      hindex *= tree_to_shwi (op->op2);
1104	      hindex *= BITS_PER_UNIT;
1105	      offset += hindex;
1106	    }
1107	  break;
1108
1109	case REALPART_EXPR:
1110	  break;
1111
1112	case IMAGPART_EXPR:
1113	  offset += size;
1114	  break;
1115
1116	case VIEW_CONVERT_EXPR:
1117	  break;
1118
1119	case STRING_CST:
1120	case INTEGER_CST:
1121	case COMPLEX_CST:
1122	case VECTOR_CST:
1123	case REAL_CST:
1124	case CONSTRUCTOR:
1125	case CONST_DECL:
1126	  return false;
1127
1128	default:
1129	  return false;
1130	}
1131    }
1132
1133  if (base == NULL_TREE)
1134    return false;
1135
1136  ref->ref = NULL_TREE;
1137  ref->base = base;
1138  ref->offset = offset;
1139  ref->size = size;
1140  ref->max_size = max_size;
1141  ref->ref_alias_set = set;
1142  if (base_alias_set != -1)
1143    ref->base_alias_set = base_alias_set;
1144  else
1145    ref->base_alias_set = get_alias_set (base);
1146  /* We discount volatiles from value-numbering elsewhere.  */
1147  ref->volatile_p = false;
1148
1149  return true;
1150}
1151
1152/* Copy the operations present in load/store/call REF into RESULT, a vector of
1153   vn_reference_op_s's.  */
1154
1155static void
1156copy_reference_ops_from_call (gcall *call,
1157			      vec<vn_reference_op_s> *result)
1158{
1159  vn_reference_op_s temp;
1160  unsigned i;
1161  tree lhs = gimple_call_lhs (call);
1162  int lr;
1163
1164  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1165     different.  By adding the lhs here in the vector, we ensure that the
1166     hashcode is different, guaranteeing a different value number.  */
1167  if (lhs && TREE_CODE (lhs) != SSA_NAME)
1168    {
1169      memset (&temp, 0, sizeof (temp));
1170      temp.opcode = MODIFY_EXPR;
1171      temp.type = TREE_TYPE (lhs);
1172      temp.op0 = lhs;
1173      temp.off = -1;
1174      result->safe_push (temp);
1175    }
1176
1177  /* Copy the type, opcode, function, static chain and EH region, if any.  */
1178  memset (&temp, 0, sizeof (temp));
1179  temp.type = gimple_call_return_type (call);
1180  temp.opcode = CALL_EXPR;
1181  temp.op0 = gimple_call_fn (call);
1182  temp.op1 = gimple_call_chain (call);
1183  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1184    temp.op2 = size_int (lr);
1185  temp.off = -1;
1186  if (gimple_call_with_bounds_p (call))
1187    temp.with_bounds = 1;
1188  result->safe_push (temp);
1189
1190  /* Copy the call arguments.  As they can be references as well,
1191     just chain them together.  */
1192  for (i = 0; i < gimple_call_num_args (call); ++i)
1193    {
1194      tree callarg = gimple_call_arg (call, i);
1195      copy_reference_ops_from_ref (callarg, result);
1196    }
1197}
1198
1199/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1200   *I_P to point to the last element of the replacement.  */
1201void
1202vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1203			    unsigned int *i_p)
1204{
1205  unsigned int i = *i_p;
1206  vn_reference_op_t op = &(*ops)[i];
1207  vn_reference_op_t mem_op = &(*ops)[i - 1];
1208  tree addr_base;
1209  HOST_WIDE_INT addr_offset = 0;
1210
1211  /* The only thing we have to do is from &OBJ.foo.bar add the offset
1212     from .foo.bar to the preceding MEM_REF offset and replace the
1213     address with &OBJ.  */
1214  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1215					     &addr_offset);
1216  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1217  if (addr_base != TREE_OPERAND (op->op0, 0))
1218    {
1219      offset_int off = offset_int::from (mem_op->op0, SIGNED);
1220      off += addr_offset;
1221      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1222      op->op0 = build_fold_addr_expr (addr_base);
1223      if (tree_fits_shwi_p (mem_op->op0))
1224	mem_op->off = tree_to_shwi (mem_op->op0);
1225      else
1226	mem_op->off = -1;
1227    }
1228}
1229
1230/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1231   *I_P to point to the last element of the replacement.  */
1232static void
1233vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1234				     unsigned int *i_p)
1235{
1236  unsigned int i = *i_p;
1237  vn_reference_op_t op = &(*ops)[i];
1238  vn_reference_op_t mem_op = &(*ops)[i - 1];
1239  gimple def_stmt;
1240  enum tree_code code;
1241  offset_int off;
1242
1243  def_stmt = SSA_NAME_DEF_STMT (op->op0);
1244  if (!is_gimple_assign (def_stmt))
1245    return;
1246
1247  code = gimple_assign_rhs_code (def_stmt);
1248  if (code != ADDR_EXPR
1249      && code != POINTER_PLUS_EXPR)
1250    return;
1251
1252  off = offset_int::from (mem_op->op0, SIGNED);
1253
1254  /* The only thing we have to do is from &OBJ.foo.bar add the offset
1255     from .foo.bar to the preceding MEM_REF offset and replace the
1256     address with &OBJ.  */
1257  if (code == ADDR_EXPR)
1258    {
1259      tree addr, addr_base;
1260      HOST_WIDE_INT addr_offset;
1261
1262      addr = gimple_assign_rhs1 (def_stmt);
1263      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1264						 &addr_offset);
1265      if (!addr_base
1266	  || TREE_CODE (addr_base) != MEM_REF)
1267	return;
1268
1269      off += addr_offset;
1270      off += mem_ref_offset (addr_base);
1271      op->op0 = TREE_OPERAND (addr_base, 0);
1272    }
1273  else
1274    {
1275      tree ptr, ptroff;
1276      ptr = gimple_assign_rhs1 (def_stmt);
1277      ptroff = gimple_assign_rhs2 (def_stmt);
1278      if (TREE_CODE (ptr) != SSA_NAME
1279	  || TREE_CODE (ptroff) != INTEGER_CST)
1280	return;
1281
1282      off += wi::to_offset (ptroff);
1283      op->op0 = ptr;
1284    }
1285
1286  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1287  if (tree_fits_shwi_p (mem_op->op0))
1288    mem_op->off = tree_to_shwi (mem_op->op0);
1289  else
1290    mem_op->off = -1;
1291  if (TREE_CODE (op->op0) == SSA_NAME)
1292    op->op0 = SSA_VAL (op->op0);
1293  if (TREE_CODE (op->op0) != SSA_NAME)
1294    op->opcode = TREE_CODE (op->op0);
1295
1296  /* And recurse.  */
1297  if (TREE_CODE (op->op0) == SSA_NAME)
1298    vn_reference_maybe_forwprop_address (ops, i_p);
1299  else if (TREE_CODE (op->op0) == ADDR_EXPR)
1300    vn_reference_fold_indirect (ops, i_p);
1301}
1302
1303/* Optimize the reference REF to a constant if possible or return
1304   NULL_TREE if not.  */
1305
1306tree
1307fully_constant_vn_reference_p (vn_reference_t ref)
1308{
1309  vec<vn_reference_op_s> operands = ref->operands;
1310  vn_reference_op_t op;
1311
1312  /* Try to simplify the translated expression if it is
1313     a call to a builtin function with at most two arguments.  */
1314  op = &operands[0];
1315  if (op->opcode == CALL_EXPR
1316      && TREE_CODE (op->op0) == ADDR_EXPR
1317      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1318      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1319      && operands.length () >= 2
1320      && operands.length () <= 3)
1321    {
1322      vn_reference_op_t arg0, arg1 = NULL;
1323      bool anyconst = false;
1324      arg0 = &operands[1];
1325      if (operands.length () > 2)
1326	arg1 = &operands[2];
1327      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1328	  || (arg0->opcode == ADDR_EXPR
1329	      && is_gimple_min_invariant (arg0->op0)))
1330	anyconst = true;
1331      if (arg1
1332	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1333	      || (arg1->opcode == ADDR_EXPR
1334		  && is_gimple_min_invariant (arg1->op0))))
1335	anyconst = true;
1336      if (anyconst)
1337	{
1338	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1339					 arg1 ? 2 : 1,
1340					 arg0->op0,
1341					 arg1 ? arg1->op0 : NULL);
1342	  if (folded
1343	      && TREE_CODE (folded) == NOP_EXPR)
1344	    folded = TREE_OPERAND (folded, 0);
1345	  if (folded
1346	      && is_gimple_min_invariant (folded))
1347	    return folded;
1348	}
1349    }
1350
1351  /* Simplify reads from constants or constant initializers.  */
1352  else if (BITS_PER_UNIT == 8
1353	   && is_gimple_reg_type (ref->type)
1354	   && (!INTEGRAL_TYPE_P (ref->type)
1355	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1356    {
1357      HOST_WIDE_INT off = 0;
1358      HOST_WIDE_INT size;
1359      if (INTEGRAL_TYPE_P (ref->type))
1360	size = TYPE_PRECISION (ref->type);
1361      else
1362	size = tree_to_shwi (TYPE_SIZE (ref->type));
1363      if (size % BITS_PER_UNIT != 0
1364	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1365	return NULL_TREE;
1366      size /= BITS_PER_UNIT;
1367      unsigned i;
1368      for (i = 0; i < operands.length (); ++i)
1369	{
1370	  if (operands[i].off == -1)
1371	    return NULL_TREE;
1372	  off += operands[i].off;
1373	  if (operands[i].opcode == MEM_REF)
1374	    {
1375	      ++i;
1376	      break;
1377	    }
1378	}
1379      vn_reference_op_t base = &operands[--i];
1380      tree ctor = error_mark_node;
1381      tree decl = NULL_TREE;
1382      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1383	ctor = base->op0;
1384      else if (base->opcode == MEM_REF
1385	       && base[1].opcode == ADDR_EXPR
1386	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1387		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1388	{
1389	  decl = TREE_OPERAND (base[1].op0, 0);
1390	  ctor = ctor_for_folding (decl);
1391	}
1392      if (ctor == NULL_TREE)
1393	return build_zero_cst (ref->type);
1394      else if (ctor != error_mark_node)
1395	{
1396	  if (decl)
1397	    {
1398	      tree res = fold_ctor_reference (ref->type, ctor,
1399					      off * BITS_PER_UNIT,
1400					      size * BITS_PER_UNIT, decl);
1401	      if (res)
1402		{
1403		  STRIP_USELESS_TYPE_CONVERSION (res);
1404		  if (is_gimple_min_invariant (res))
1405		    return res;
1406		}
1407	    }
1408	  else
1409	    {
1410	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1411	      if (native_encode_expr (ctor, buf, size, off) > 0)
1412		return native_interpret_expr (ref->type, buf, size);
1413	    }
1414	}
1415    }
1416
1417  return NULL_TREE;
1418}
1419
1420/* Transform any SSA_NAME's in a vector of vn_reference_op_s
1421   structures into their value numbers.  This is done in-place, and
1422   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1423   whether any operands were valueized.  */
1424
1425static vec<vn_reference_op_s>
1426valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1427{
1428  vn_reference_op_t vro;
1429  unsigned int i;
1430
1431  *valueized_anything = false;
1432
1433  FOR_EACH_VEC_ELT (orig, i, vro)
1434    {
1435      if (vro->opcode == SSA_NAME
1436	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1437	{
1438	  tree tem = SSA_VAL (vro->op0);
1439	  if (tem != vro->op0)
1440	    {
1441	      *valueized_anything = true;
1442	      vro->op0 = tem;
1443	    }
1444	  /* If it transforms from an SSA_NAME to a constant, update
1445	     the opcode.  */
1446	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1447	    vro->opcode = TREE_CODE (vro->op0);
1448	}
1449      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1450	{
1451	  tree tem = SSA_VAL (vro->op1);
1452	  if (tem != vro->op1)
1453	    {
1454	      *valueized_anything = true;
1455	      vro->op1 = tem;
1456	    }
1457	}
1458      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1459	{
1460	  tree tem = SSA_VAL (vro->op2);
1461	  if (tem != vro->op2)
1462	    {
1463	      *valueized_anything = true;
1464	      vro->op2 = tem;
1465	    }
1466	}
1467      /* If it transforms from an SSA_NAME to an address, fold with
1468	 a preceding indirect reference.  */
1469      if (i > 0
1470	  && vro->op0
1471	  && TREE_CODE (vro->op0) == ADDR_EXPR
1472	  && orig[i - 1].opcode == MEM_REF)
1473	vn_reference_fold_indirect (&orig, &i);
1474      else if (i > 0
1475	       && vro->opcode == SSA_NAME
1476	       && orig[i - 1].opcode == MEM_REF)
1477	vn_reference_maybe_forwprop_address (&orig, &i);
1478      /* If it transforms a non-constant ARRAY_REF into a constant
1479	 one, adjust the constant offset.  */
1480      else if (vro->opcode == ARRAY_REF
1481	       && vro->off == -1
1482	       && TREE_CODE (vro->op0) == INTEGER_CST
1483	       && TREE_CODE (vro->op1) == INTEGER_CST
1484	       && TREE_CODE (vro->op2) == INTEGER_CST)
1485	{
1486	  offset_int off = ((wi::to_offset (vro->op0)
1487			     - wi::to_offset (vro->op1))
1488			    * wi::to_offset (vro->op2));
1489	  if (wi::fits_shwi_p (off))
1490	    vro->off = off.to_shwi ();
1491	}
1492    }
1493
1494  return orig;
1495}
1496
1497static vec<vn_reference_op_s>
1498valueize_refs (vec<vn_reference_op_s> orig)
1499{
1500  bool tem;
1501  return valueize_refs_1 (orig, &tem);
1502}
1503
1504static vec<vn_reference_op_s> shared_lookup_references;
1505
1506/* Create a vector of vn_reference_op_s structures from REF, a
1507   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1508   this function.  *VALUEIZED_ANYTHING will specify whether any
1509   operands were valueized.  */
1510
1511static vec<vn_reference_op_s>
1512valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1513{
1514  if (!ref)
1515    return vNULL;
1516  shared_lookup_references.truncate (0);
1517  copy_reference_ops_from_ref (ref, &shared_lookup_references);
1518  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1519					      valueized_anything);
1520  return shared_lookup_references;
1521}
1522
1523/* Create a vector of vn_reference_op_s structures from CALL, a
1524   call statement.  The vector is shared among all callers of
1525   this function.  */
1526
1527static vec<vn_reference_op_s>
1528valueize_shared_reference_ops_from_call (gcall *call)
1529{
1530  if (!call)
1531    return vNULL;
1532  shared_lookup_references.truncate (0);
1533  copy_reference_ops_from_call (call, &shared_lookup_references);
1534  shared_lookup_references = valueize_refs (shared_lookup_references);
1535  return shared_lookup_references;
1536}
1537
1538/* Lookup a SCCVN reference operation VR in the current hash table.
1539   Returns the resulting value number if it exists in the hash table,
1540   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1541   vn_reference_t stored in the hashtable if something is found.  */
1542
1543static tree
1544vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1545{
1546  vn_reference_s **slot;
1547  hashval_t hash;
1548
1549  hash = vr->hashcode;
1550  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1551  if (!slot && current_info == optimistic_info)
1552    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1553  if (slot)
1554    {
1555      if (vnresult)
1556	*vnresult = (vn_reference_t)*slot;
1557      return ((vn_reference_t)*slot)->result;
1558    }
1559
1560  return NULL_TREE;
1561}
1562
1563static tree *last_vuse_ptr;
1564static vn_lookup_kind vn_walk_kind;
1565static vn_lookup_kind default_vn_walk_kind;
1566
1567/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1568   with the current VUSE and performs the expression lookup.  */
1569
1570static void *
1571vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1572		       unsigned int cnt, void *vr_)
1573{
1574  vn_reference_t vr = (vn_reference_t)vr_;
1575  vn_reference_s **slot;
1576  hashval_t hash;
1577
1578  /* This bounds the stmt walks we perform on reference lookups
1579     to O(1) instead of O(N) where N is the number of dominating
1580     stores.  */
1581  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1582    return (void *)-1;
1583
1584  if (last_vuse_ptr)
1585    *last_vuse_ptr = vuse;
1586
1587  /* Fixup vuse and hash.  */
1588  if (vr->vuse)
1589    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1590  vr->vuse = vuse_ssa_val (vuse);
1591  if (vr->vuse)
1592    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1593
1594  hash = vr->hashcode;
1595  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1596  if (!slot && current_info == optimistic_info)
1597    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1598  if (slot)
1599    return *slot;
1600
1601  return NULL;
1602}
1603
1604/* Lookup an existing or insert a new vn_reference entry into the
1605   value table for the VUSE, SET, TYPE, OPERANDS reference which
1606   has the value VALUE which is either a constant or an SSA name.  */
1607
1608static vn_reference_t
1609vn_reference_lookup_or_insert_for_pieces (tree vuse,
1610					  alias_set_type set,
1611					  tree type,
1612					  vec<vn_reference_op_s,
1613					        va_heap> operands,
1614					  tree value)
1615{
1616  vn_reference_s vr1;
1617  vn_reference_t result;
1618  unsigned value_id;
1619  vr1.vuse = vuse;
1620  vr1.operands = operands;
1621  vr1.type = type;
1622  vr1.set = set;
1623  vr1.hashcode = vn_reference_compute_hash (&vr1);
1624  if (vn_reference_lookup_1 (&vr1, &result))
1625    return result;
1626  if (TREE_CODE (value) == SSA_NAME)
1627    value_id = VN_INFO (value)->value_id;
1628  else
1629    value_id = get_or_alloc_constant_value_id (value);
1630  return vn_reference_insert_pieces (vuse, set, type,
1631				     operands.copy (), value, value_id);
1632}
1633
1634/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1635   from the statement defining VUSE and if not successful tries to
1636   translate *REFP and VR_ through an aggregate copy at the definition
1637   of VUSE.  */
1638
1639static void *
1640vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1641		       bool disambiguate_only)
1642{
1643  vn_reference_t vr = (vn_reference_t)vr_;
1644  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1645  tree base;
1646  HOST_WIDE_INT offset, maxsize;
1647  static vec<vn_reference_op_s>
1648    lhs_ops = vNULL;
1649  ao_ref lhs_ref;
1650  bool lhs_ref_ok = false;
1651
1652  /* First try to disambiguate after value-replacing in the definitions LHS.  */
1653  if (is_gimple_assign (def_stmt))
1654    {
1655      vec<vn_reference_op_s> tem;
1656      tree lhs = gimple_assign_lhs (def_stmt);
1657      bool valueized_anything = false;
1658      /* Avoid re-allocation overhead.  */
1659      lhs_ops.truncate (0);
1660      copy_reference_ops_from_ref (lhs, &lhs_ops);
1661      tem = lhs_ops;
1662      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1663      gcc_assert (lhs_ops == tem);
1664      if (valueized_anything)
1665	{
1666	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1667						      get_alias_set (lhs),
1668						      TREE_TYPE (lhs), lhs_ops);
1669	  if (lhs_ref_ok
1670	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1671	    return NULL;
1672	}
1673      else
1674	{
1675	  ao_ref_init (&lhs_ref, lhs);
1676	  lhs_ref_ok = true;
1677	}
1678    }
1679  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1680	   && gimple_call_num_args (def_stmt) <= 4)
1681    {
1682      /* For builtin calls valueize its arguments and call the
1683         alias oracle again.  Valueization may improve points-to
1684	 info of pointers and constify size and position arguments.
1685	 Originally this was motivated by PR61034 which has
1686	 conditional calls to free falsely clobbering ref because
1687	 of imprecise points-to info of the argument.  */
1688      tree oldargs[4];
1689      bool valueized_anything = false;
1690      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1691	{
1692	  oldargs[i] = gimple_call_arg (def_stmt, i);
1693	  if (TREE_CODE (oldargs[i]) == SSA_NAME
1694	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
1695	    {
1696	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1697	      valueized_anything = true;
1698	    }
1699	}
1700      if (valueized_anything)
1701	{
1702	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1703					       ref);
1704	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1705	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
1706	  if (!res)
1707	    return NULL;
1708	}
1709    }
1710
1711  if (disambiguate_only)
1712    return (void *)-1;
1713
1714  base = ao_ref_base (ref);
1715  offset = ref->offset;
1716  maxsize = ref->max_size;
1717
1718  /* If we cannot constrain the size of the reference we cannot
1719     test if anything kills it.  */
1720  if (maxsize == -1)
1721    return (void *)-1;
1722
1723  /* We can't deduce anything useful from clobbers.  */
1724  if (gimple_clobber_p (def_stmt))
1725    return (void *)-1;
1726
1727  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1728     from that definition.
1729     1) Memset.  */
1730  if (is_gimple_reg_type (vr->type)
1731      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1732      && integer_zerop (gimple_call_arg (def_stmt, 1))
1733      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1734      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1735    {
1736      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1737      tree base2;
1738      HOST_WIDE_INT offset2, size2, maxsize2;
1739      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1740      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1741      if ((unsigned HOST_WIDE_INT)size2 / 8
1742	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1743	  && maxsize2 != -1
1744	  && operand_equal_p (base, base2, 0)
1745	  && offset2 <= offset
1746	  && offset2 + size2 >= offset + maxsize)
1747	{
1748	  tree val = build_zero_cst (vr->type);
1749	  return vn_reference_lookup_or_insert_for_pieces
1750	           (vuse, vr->set, vr->type, vr->operands, val);
1751	}
1752    }
1753
1754  /* 2) Assignment from an empty CONSTRUCTOR.  */
1755  else if (is_gimple_reg_type (vr->type)
1756	   && gimple_assign_single_p (def_stmt)
1757	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1758	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1759    {
1760      tree base2;
1761      HOST_WIDE_INT offset2, size2, maxsize2;
1762      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1763				       &offset2, &size2, &maxsize2);
1764      if (maxsize2 != -1
1765	  && operand_equal_p (base, base2, 0)
1766	  && offset2 <= offset
1767	  && offset2 + size2 >= offset + maxsize)
1768	{
1769	  tree val = build_zero_cst (vr->type);
1770	  return vn_reference_lookup_or_insert_for_pieces
1771	           (vuse, vr->set, vr->type, vr->operands, val);
1772	}
1773    }
1774
1775  /* 3) Assignment from a constant.  We can use folds native encode/interpret
1776     routines to extract the assigned bits.  */
1777  else if (vn_walk_kind == VN_WALKREWRITE
1778	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1779	   && ref->size == maxsize
1780	   && maxsize % BITS_PER_UNIT == 0
1781	   && offset % BITS_PER_UNIT == 0
1782	   && is_gimple_reg_type (vr->type)
1783	   && gimple_assign_single_p (def_stmt)
1784	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1785    {
1786      tree base2;
1787      HOST_WIDE_INT offset2, size2, maxsize2;
1788      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1789				       &offset2, &size2, &maxsize2);
1790      if (maxsize2 != -1
1791	  && maxsize2 == size2
1792	  && size2 % BITS_PER_UNIT == 0
1793	  && offset2 % BITS_PER_UNIT == 0
1794	  && operand_equal_p (base, base2, 0)
1795	  && offset2 <= offset
1796	  && offset2 + size2 >= offset + maxsize)
1797	{
1798	  /* We support up to 512-bit values (for V8DFmode).  */
1799	  unsigned char buffer[64];
1800	  int len;
1801
1802	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1803				    buffer, sizeof (buffer));
1804	  if (len > 0)
1805	    {
1806	      tree val = native_interpret_expr (vr->type,
1807						buffer
1808						+ ((offset - offset2)
1809						   / BITS_PER_UNIT),
1810						ref->size / BITS_PER_UNIT);
1811	      if (val)
1812		return vn_reference_lookup_or_insert_for_pieces
1813		         (vuse, vr->set, vr->type, vr->operands, val);
1814	    }
1815	}
1816    }
1817
1818  /* 4) Assignment from an SSA name which definition we may be able
1819     to access pieces from.  */
1820  else if (ref->size == maxsize
1821	   && is_gimple_reg_type (vr->type)
1822	   && gimple_assign_single_p (def_stmt)
1823	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1824    {
1825      tree rhs1 = gimple_assign_rhs1 (def_stmt);
1826      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1827      if (is_gimple_assign (def_stmt2)
1828	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1829	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1830	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1831	{
1832	  tree base2;
1833	  HOST_WIDE_INT offset2, size2, maxsize2, off;
1834	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1835					   &offset2, &size2, &maxsize2);
1836	  off = offset - offset2;
1837	  if (maxsize2 != -1
1838	      && maxsize2 == size2
1839	      && operand_equal_p (base, base2, 0)
1840	      && offset2 <= offset
1841	      && offset2 + size2 >= offset + maxsize)
1842	    {
1843	      tree val = NULL_TREE;
1844	      HOST_WIDE_INT elsz
1845		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1846	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1847		{
1848		  if (off == 0)
1849		    val = gimple_assign_rhs1 (def_stmt2);
1850		  else if (off == elsz)
1851		    val = gimple_assign_rhs2 (def_stmt2);
1852		}
1853	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1854		       && off % elsz == 0)
1855		{
1856		  tree ctor = gimple_assign_rhs1 (def_stmt2);
1857		  unsigned i = off / elsz;
1858		  if (i < CONSTRUCTOR_NELTS (ctor))
1859		    {
1860		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1861		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1862			{
1863			  if (TREE_CODE (TREE_TYPE (elt->value))
1864			      != VECTOR_TYPE)
1865			    val = elt->value;
1866			}
1867		    }
1868		}
1869	      if (val)
1870		return vn_reference_lookup_or_insert_for_pieces
1871		         (vuse, vr->set, vr->type, vr->operands, val);
1872	    }
1873	}
1874    }
1875
1876  /* 5) For aggregate copies translate the reference through them if
1877     the copy kills ref.  */
1878  else if (vn_walk_kind == VN_WALKREWRITE
1879	   && gimple_assign_single_p (def_stmt)
1880	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
1881	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1882	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1883    {
1884      tree base2;
1885      HOST_WIDE_INT offset2, size2, maxsize2;
1886      int i, j;
1887      auto_vec<vn_reference_op_s> rhs;
1888      vn_reference_op_t vro;
1889      ao_ref r;
1890
1891      if (!lhs_ref_ok)
1892	return (void *)-1;
1893
1894      /* See if the assignment kills REF.  */
1895      base2 = ao_ref_base (&lhs_ref);
1896      offset2 = lhs_ref.offset;
1897      size2 = lhs_ref.size;
1898      maxsize2 = lhs_ref.max_size;
1899      if (maxsize2 == -1
1900	  || (base != base2 && !operand_equal_p (base, base2, 0))
1901	  || offset2 > offset
1902	  || offset2 + size2 < offset + maxsize)
1903	return (void *)-1;
1904
1905      /* Find the common base of ref and the lhs.  lhs_ops already
1906         contains valueized operands for the lhs.  */
1907      i = vr->operands.length () - 1;
1908      j = lhs_ops.length () - 1;
1909      while (j >= 0 && i >= 0
1910	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1911	{
1912	  i--;
1913	  j--;
1914	}
1915
1916      /* ???  The innermost op should always be a MEM_REF and we already
1917         checked that the assignment to the lhs kills vr.  Thus for
1918	 aggregate copies using char[] types the vn_reference_op_eq
1919	 may fail when comparing types for compatibility.  But we really
1920	 don't care here - further lookups with the rewritten operands
1921	 will simply fail if we messed up types too badly.  */
1922      HOST_WIDE_INT extra_off = 0;
1923      if (j == 0 && i >= 0
1924	  && lhs_ops[0].opcode == MEM_REF
1925	  && lhs_ops[0].off != -1)
1926	{
1927	  if (lhs_ops[0].off == vr->operands[i].off)
1928	    i--, j--;
1929	  else if (vr->operands[i].opcode == MEM_REF
1930		   && vr->operands[i].off != -1)
1931	    {
1932	      extra_off = vr->operands[i].off - lhs_ops[0].off;
1933	      i--, j--;
1934	    }
1935	}
1936
1937      /* i now points to the first additional op.
1938	 ???  LHS may not be completely contained in VR, one or more
1939	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
1940	 try handling outermost VIEW_CONVERT_EXPRs.  */
1941      if (j != -1)
1942	return (void *)-1;
1943
1944      /* Now re-write REF to be based on the rhs of the assignment.  */
1945      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1946
1947      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
1948      if (extra_off != 0)
1949	{
1950	  if (rhs.length () < 2
1951	      || rhs[0].opcode != MEM_REF
1952	      || rhs[0].off == -1)
1953	    return (void *)-1;
1954	  rhs[0].off += extra_off;
1955	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1956					build_int_cst (TREE_TYPE (rhs[0].op0),
1957						       extra_off));
1958	}
1959
1960      /* We need to pre-pend vr->operands[0..i] to rhs.  */
1961      vec<vn_reference_op_s> old = vr->operands;
1962      if (i + 1 + rhs.length () > vr->operands.length ())
1963	{
1964	  vr->operands.safe_grow (i + 1 + rhs.length ());
1965	  if (old == shared_lookup_references)
1966	    shared_lookup_references = vr->operands;
1967	}
1968      else
1969	vr->operands.truncate (i + 1 + rhs.length ());
1970      FOR_EACH_VEC_ELT (rhs, j, vro)
1971	vr->operands[i + 1 + j] = *vro;
1972      vr->operands = valueize_refs (vr->operands);
1973      if (old == shared_lookup_references)
1974	shared_lookup_references = vr->operands;
1975      vr->hashcode = vn_reference_compute_hash (vr);
1976
1977      /* Try folding the new reference to a constant.  */
1978      tree val = fully_constant_vn_reference_p (vr);
1979      if (val)
1980	return vn_reference_lookup_or_insert_for_pieces
1981		 (vuse, vr->set, vr->type, vr->operands, val);
1982
1983      /* Adjust *ref from the new operands.  */
1984      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1985	return (void *)-1;
1986      /* This can happen with bitfields.  */
1987      if (ref->size != r.size)
1988	return (void *)-1;
1989      *ref = r;
1990
1991      /* Do not update last seen VUSE after translating.  */
1992      last_vuse_ptr = NULL;
1993
1994      /* Keep looking for the adjusted *REF / VR pair.  */
1995      return NULL;
1996    }
1997
1998  /* 6) For memcpy copies translate the reference through them if
1999     the copy kills ref.  */
2000  else if (vn_walk_kind == VN_WALKREWRITE
2001	   && is_gimple_reg_type (vr->type)
2002	   /* ???  Handle BCOPY as well.  */
2003	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2004	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2005	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2006	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2007	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2008	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2009	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2010	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2011    {
2012      tree lhs, rhs;
2013      ao_ref r;
2014      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2015      vn_reference_op_s op;
2016      HOST_WIDE_INT at;
2017
2018
2019      /* Only handle non-variable, addressable refs.  */
2020      if (ref->size != maxsize
2021	  || offset % BITS_PER_UNIT != 0
2022	  || ref->size % BITS_PER_UNIT != 0)
2023	return (void *)-1;
2024
2025      /* Extract a pointer base and an offset for the destination.  */
2026      lhs = gimple_call_arg (def_stmt, 0);
2027      lhs_offset = 0;
2028      if (TREE_CODE (lhs) == SSA_NAME)
2029	lhs = SSA_VAL (lhs);
2030      if (TREE_CODE (lhs) == ADDR_EXPR)
2031	{
2032	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2033						    &lhs_offset);
2034	  if (!tem)
2035	    return (void *)-1;
2036	  if (TREE_CODE (tem) == MEM_REF
2037	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2038	    {
2039	      lhs = TREE_OPERAND (tem, 0);
2040	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2041	    }
2042	  else if (DECL_P (tem))
2043	    lhs = build_fold_addr_expr (tem);
2044	  else
2045	    return (void *)-1;
2046	}
2047      if (TREE_CODE (lhs) != SSA_NAME
2048	  && TREE_CODE (lhs) != ADDR_EXPR)
2049	return (void *)-1;
2050
2051      /* Extract a pointer base and an offset for the source.  */
2052      rhs = gimple_call_arg (def_stmt, 1);
2053      rhs_offset = 0;
2054      if (TREE_CODE (rhs) == SSA_NAME)
2055	rhs = SSA_VAL (rhs);
2056      if (TREE_CODE (rhs) == ADDR_EXPR)
2057	{
2058	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2059						    &rhs_offset);
2060	  if (!tem)
2061	    return (void *)-1;
2062	  if (TREE_CODE (tem) == MEM_REF
2063	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2064	    {
2065	      rhs = TREE_OPERAND (tem, 0);
2066	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2067	    }
2068	  else if (DECL_P (tem))
2069	    rhs = build_fold_addr_expr (tem);
2070	  else
2071	    return (void *)-1;
2072	}
2073      if (TREE_CODE (rhs) != SSA_NAME
2074	  && TREE_CODE (rhs) != ADDR_EXPR)
2075	return (void *)-1;
2076
2077      copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2078
2079      /* The bases of the destination and the references have to agree.  */
2080      if ((TREE_CODE (base) != MEM_REF
2081	   && !DECL_P (base))
2082	  || (TREE_CODE (base) == MEM_REF
2083	      && (TREE_OPERAND (base, 0) != lhs
2084		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2085	  || (DECL_P (base)
2086	      && (TREE_CODE (lhs) != ADDR_EXPR
2087		  || TREE_OPERAND (lhs, 0) != base)))
2088	return (void *)-1;
2089
2090      /* And the access has to be contained within the memcpy destination.  */
2091      at = offset / BITS_PER_UNIT;
2092      if (TREE_CODE (base) == MEM_REF)
2093	at += tree_to_uhwi (TREE_OPERAND (base, 1));
2094      if (lhs_offset > at
2095	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2096	return (void *)-1;
2097
2098      /* Make room for 2 operands in the new reference.  */
2099      if (vr->operands.length () < 2)
2100	{
2101	  vec<vn_reference_op_s> old = vr->operands;
2102	  vr->operands.safe_grow_cleared (2);
2103	  if (old == shared_lookup_references
2104	      && vr->operands != old)
2105	    shared_lookup_references = vr->operands;
2106	}
2107      else
2108	vr->operands.truncate (2);
2109
2110      /* The looked-through reference is a simple MEM_REF.  */
2111      memset (&op, 0, sizeof (op));
2112      op.type = vr->type;
2113      op.opcode = MEM_REF;
2114      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2115      op.off = at - lhs_offset + rhs_offset;
2116      vr->operands[0] = op;
2117      op.type = TREE_TYPE (rhs);
2118      op.opcode = TREE_CODE (rhs);
2119      op.op0 = rhs;
2120      op.off = -1;
2121      vr->operands[1] = op;
2122      vr->hashcode = vn_reference_compute_hash (vr);
2123
2124      /* Adjust *ref from the new operands.  */
2125      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2126	return (void *)-1;
2127      /* This can happen with bitfields.  */
2128      if (ref->size != r.size)
2129	return (void *)-1;
2130      *ref = r;
2131
2132      /* Do not update last seen VUSE after translating.  */
2133      last_vuse_ptr = NULL;
2134
2135      /* Keep looking for the adjusted *REF / VR pair.  */
2136      return NULL;
2137    }
2138
2139  /* Bail out and stop walking.  */
2140  return (void *)-1;
2141}
2142
2143/* Lookup a reference operation by it's parts, in the current hash table.
2144   Returns the resulting value number if it exists in the hash table,
2145   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2146   vn_reference_t stored in the hashtable if something is found.  */
2147
2148tree
2149vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2150			    vec<vn_reference_op_s> operands,
2151			    vn_reference_t *vnresult, vn_lookup_kind kind)
2152{
2153  struct vn_reference_s vr1;
2154  vn_reference_t tmp;
2155  tree cst;
2156
2157  if (!vnresult)
2158    vnresult = &tmp;
2159  *vnresult = NULL;
2160
2161  vr1.vuse = vuse_ssa_val (vuse);
2162  shared_lookup_references.truncate (0);
2163  shared_lookup_references.safe_grow (operands.length ());
2164  memcpy (shared_lookup_references.address (),
2165	  operands.address (),
2166	  sizeof (vn_reference_op_s)
2167	  * operands.length ());
2168  vr1.operands = operands = shared_lookup_references
2169    = valueize_refs (shared_lookup_references);
2170  vr1.type = type;
2171  vr1.set = set;
2172  vr1.hashcode = vn_reference_compute_hash (&vr1);
2173  if ((cst = fully_constant_vn_reference_p (&vr1)))
2174    return cst;
2175
2176  vn_reference_lookup_1 (&vr1, vnresult);
2177  if (!*vnresult
2178      && kind != VN_NOWALK
2179      && vr1.vuse)
2180    {
2181      ao_ref r;
2182      vn_walk_kind = kind;
2183      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2184	*vnresult =
2185	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2186						  vn_reference_lookup_2,
2187						  vn_reference_lookup_3,
2188						  vuse_ssa_val, &vr1);
2189      gcc_checking_assert (vr1.operands == shared_lookup_references);
2190    }
2191
2192  if (*vnresult)
2193     return (*vnresult)->result;
2194
2195  return NULL_TREE;
2196}
2197
2198/* Lookup OP in the current hash table, and return the resulting value
2199   number if it exists in the hash table.  Return NULL_TREE if it does
2200   not exist in the hash table or if the result field of the structure
2201   was NULL..  VNRESULT will be filled in with the vn_reference_t
2202   stored in the hashtable if one exists.  When TBAA_P is false assume
2203   we are looking up a store and treat it as having alias-set zero.  */
2204
2205tree
2206vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2207		     vn_reference_t *vnresult, bool tbaa_p)
2208{
2209  vec<vn_reference_op_s> operands;
2210  struct vn_reference_s vr1;
2211  tree cst;
2212  bool valuezied_anything;
2213
2214  if (vnresult)
2215    *vnresult = NULL;
2216
2217  vr1.vuse = vuse_ssa_val (vuse);
2218  vr1.operands = operands
2219    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2220  vr1.type = TREE_TYPE (op);
2221  vr1.set = tbaa_p ? get_alias_set (op) : 0;
2222  vr1.hashcode = vn_reference_compute_hash (&vr1);
2223  if ((cst = fully_constant_vn_reference_p (&vr1)))
2224    return cst;
2225
2226  if (kind != VN_NOWALK
2227      && vr1.vuse)
2228    {
2229      vn_reference_t wvnresult;
2230      ao_ref r;
2231      /* Make sure to use a valueized reference if we valueized anything.
2232         Otherwise preserve the full reference for advanced TBAA.  */
2233      if (!valuezied_anything
2234	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2235					     vr1.operands))
2236	ao_ref_init (&r, op);
2237      if (! tbaa_p)
2238	r.ref_alias_set = r.base_alias_set = 0;
2239      vn_walk_kind = kind;
2240      wvnresult =
2241	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2242						vn_reference_lookup_2,
2243						vn_reference_lookup_3,
2244						vuse_ssa_val, &vr1);
2245      gcc_checking_assert (vr1.operands == shared_lookup_references);
2246      if (wvnresult)
2247	{
2248	  if (vnresult)
2249	    *vnresult = wvnresult;
2250	  return wvnresult->result;
2251	}
2252
2253      return NULL_TREE;
2254    }
2255
2256  return vn_reference_lookup_1 (&vr1, vnresult);
2257}
2258
2259/* Lookup CALL in the current hash table and return the entry in
2260   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2261
2262void
2263vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2264			  vn_reference_t vr)
2265{
2266  if (vnresult)
2267    *vnresult = NULL;
2268
2269  tree vuse = gimple_vuse (call);
2270
2271  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2272  vr->operands = valueize_shared_reference_ops_from_call (call);
2273  vr->type = gimple_expr_type (call);
2274  vr->set = 0;
2275  vr->hashcode = vn_reference_compute_hash (vr);
2276  vn_reference_lookup_1 (vr, vnresult);
2277}
2278
2279/* Insert OP into the current hash table with a value number of
2280   RESULT, and return the resulting reference structure we created.  */
2281
2282static vn_reference_t
2283vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2284{
2285  vn_reference_s **slot;
2286  vn_reference_t vr1;
2287  bool tem;
2288
2289  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2290  if (TREE_CODE (result) == SSA_NAME)
2291    vr1->value_id = VN_INFO (result)->value_id;
2292  else
2293    vr1->value_id = get_or_alloc_constant_value_id (result);
2294  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2295  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2296  vr1->type = TREE_TYPE (op);
2297  vr1->set = get_alias_set (op);
2298  vr1->hashcode = vn_reference_compute_hash (vr1);
2299  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2300  vr1->result_vdef = vdef;
2301
2302  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2303							INSERT);
2304
2305  /* Because we lookup stores using vuses, and value number failures
2306     using the vdefs (see visit_reference_op_store for how and why),
2307     it's possible that on failure we may try to insert an already
2308     inserted store.  This is not wrong, there is no ssa name for a
2309     store that we could use as a differentiator anyway.  Thus, unlike
2310     the other lookup functions, you cannot gcc_assert (!*slot)
2311     here.  */
2312
2313  /* But free the old slot in case of a collision.  */
2314  if (*slot)
2315    free_reference (*slot);
2316
2317  *slot = vr1;
2318  return vr1;
2319}
2320
2321/* Insert a reference by it's pieces into the current hash table with
2322   a value number of RESULT.  Return the resulting reference
2323   structure we created.  */
2324
2325vn_reference_t
2326vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2327			    vec<vn_reference_op_s> operands,
2328			    tree result, unsigned int value_id)
2329
2330{
2331  vn_reference_s **slot;
2332  vn_reference_t vr1;
2333
2334  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2335  vr1->value_id = value_id;
2336  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2337  vr1->operands = valueize_refs (operands);
2338  vr1->type = type;
2339  vr1->set = set;
2340  vr1->hashcode = vn_reference_compute_hash (vr1);
2341  if (result && TREE_CODE (result) == SSA_NAME)
2342    result = SSA_VAL (result);
2343  vr1->result = result;
2344
2345  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2346							INSERT);
2347
2348  /* At this point we should have all the things inserted that we have
2349     seen before, and we should never try inserting something that
2350     already exists.  */
2351  gcc_assert (!*slot);
2352  if (*slot)
2353    free_reference (*slot);
2354
2355  *slot = vr1;
2356  return vr1;
2357}
2358
2359/* Compute and return the hash value for nary operation VBO1.  */
2360
2361static hashval_t
2362vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2363{
2364  inchash::hash hstate;
2365  unsigned i;
2366
2367  for (i = 0; i < vno1->length; ++i)
2368    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2369      vno1->op[i] = SSA_VAL (vno1->op[i]);
2370
2371  if (vno1->length == 2
2372      && commutative_tree_code (vno1->opcode)
2373      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2374    {
2375      tree temp = vno1->op[0];
2376      vno1->op[0] = vno1->op[1];
2377      vno1->op[1] = temp;
2378    }
2379
2380  hstate.add_int (vno1->opcode);
2381  for (i = 0; i < vno1->length; ++i)
2382    inchash::add_expr (vno1->op[i], hstate);
2383
2384  return hstate.end ();
2385}
2386
2387/* Compare nary operations VNO1 and VNO2 and return true if they are
2388   equivalent.  */
2389
2390bool
2391vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2392{
2393  unsigned i;
2394
2395  if (vno1->hashcode != vno2->hashcode)
2396    return false;
2397
2398  if (vno1->length != vno2->length)
2399    return false;
2400
2401  if (vno1->opcode != vno2->opcode
2402      || !types_compatible_p (vno1->type, vno2->type))
2403    return false;
2404
2405  for (i = 0; i < vno1->length; ++i)
2406    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2407      return false;
2408
2409  return true;
2410}
2411
2412/* Initialize VNO from the pieces provided.  */
2413
2414static void
2415init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2416			     enum tree_code code, tree type, tree *ops)
2417{
2418  vno->opcode = code;
2419  vno->length = length;
2420  vno->type = type;
2421  memcpy (&vno->op[0], ops, sizeof (tree) * length);
2422}
2423
2424/* Initialize VNO from OP.  */
2425
2426static void
2427init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2428{
2429  unsigned i;
2430
2431  vno->opcode = TREE_CODE (op);
2432  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2433  vno->type = TREE_TYPE (op);
2434  for (i = 0; i < vno->length; ++i)
2435    vno->op[i] = TREE_OPERAND (op, i);
2436}
2437
2438/* Return the number of operands for a vn_nary ops structure from STMT.  */
2439
2440static unsigned int
2441vn_nary_length_from_stmt (gimple stmt)
2442{
2443  switch (gimple_assign_rhs_code (stmt))
2444    {
2445    case REALPART_EXPR:
2446    case IMAGPART_EXPR:
2447    case VIEW_CONVERT_EXPR:
2448      return 1;
2449
2450    case BIT_FIELD_REF:
2451      return 3;
2452
2453    case CONSTRUCTOR:
2454      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2455
2456    default:
2457      return gimple_num_ops (stmt) - 1;
2458    }
2459}
2460
2461/* Initialize VNO from STMT.  */
2462
2463static void
2464init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2465{
2466  unsigned i;
2467
2468  vno->opcode = gimple_assign_rhs_code (stmt);
2469  vno->type = gimple_expr_type (stmt);
2470  switch (vno->opcode)
2471    {
2472    case REALPART_EXPR:
2473    case IMAGPART_EXPR:
2474    case VIEW_CONVERT_EXPR:
2475      vno->length = 1;
2476      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2477      break;
2478
2479    case BIT_FIELD_REF:
2480      vno->length = 3;
2481      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2482      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2483      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2484      break;
2485
2486    case CONSTRUCTOR:
2487      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2488      for (i = 0; i < vno->length; ++i)
2489	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2490      break;
2491
2492    default:
2493      gcc_checking_assert (!gimple_assign_single_p (stmt));
2494      vno->length = gimple_num_ops (stmt) - 1;
2495      for (i = 0; i < vno->length; ++i)
2496	vno->op[i] = gimple_op (stmt, i + 1);
2497    }
2498}
2499
2500/* Compute the hashcode for VNO and look for it in the hash table;
2501   return the resulting value number if it exists in the hash table.
2502   Return NULL_TREE if it does not exist in the hash table or if the
2503   result field of the operation is NULL.  VNRESULT will contain the
2504   vn_nary_op_t from the hashtable if it exists.  */
2505
2506static tree
2507vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2508{
2509  vn_nary_op_s **slot;
2510
2511  if (vnresult)
2512    *vnresult = NULL;
2513
2514  vno->hashcode = vn_nary_op_compute_hash (vno);
2515  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2516						  NO_INSERT);
2517  if (!slot && current_info == optimistic_info)
2518    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2519						  NO_INSERT);
2520  if (!slot)
2521    return NULL_TREE;
2522  if (vnresult)
2523    *vnresult = *slot;
2524  return (*slot)->result;
2525}
2526
2527/* Lookup a n-ary operation by its pieces and return the resulting value
2528   number if it exists in the hash table.  Return NULL_TREE if it does
2529   not exist in the hash table or if the result field of the operation
2530   is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2531   if it exists.  */
2532
2533tree
2534vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2535			  tree type, tree *ops, vn_nary_op_t *vnresult)
2536{
2537  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2538				  sizeof_vn_nary_op (length));
2539  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2540  return vn_nary_op_lookup_1 (vno1, vnresult);
2541}
2542
2543/* Lookup OP in the current hash table, and return the resulting value
2544   number if it exists in the hash table.  Return NULL_TREE if it does
2545   not exist in the hash table or if the result field of the operation
2546   is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2547   if it exists.  */
2548
2549tree
2550vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2551{
2552  vn_nary_op_t vno1
2553    = XALLOCAVAR (struct vn_nary_op_s,
2554		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2555  init_vn_nary_op_from_op (vno1, op);
2556  return vn_nary_op_lookup_1 (vno1, vnresult);
2557}
2558
2559/* Lookup the rhs of STMT in the current hash table, and return the resulting
2560   value number if it exists in the hash table.  Return NULL_TREE if
2561   it does not exist in the hash table.  VNRESULT will contain the
2562   vn_nary_op_t from the hashtable if it exists.  */
2563
2564tree
2565vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2566{
2567  vn_nary_op_t vno1
2568    = XALLOCAVAR (struct vn_nary_op_s,
2569		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2570  init_vn_nary_op_from_stmt (vno1, stmt);
2571  return vn_nary_op_lookup_1 (vno1, vnresult);
2572}
2573
2574/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2575
2576static vn_nary_op_t
2577alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2578{
2579  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2580}
2581
2582/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2583   obstack.  */
2584
2585static vn_nary_op_t
2586alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2587{
2588  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2589					       &current_info->nary_obstack);
2590
2591  vno1->value_id = value_id;
2592  vno1->length = length;
2593  vno1->result = result;
2594
2595  return vno1;
2596}
2597
2598/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2599   VNO->HASHCODE first.  */
2600
2601static vn_nary_op_t
2602vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2603			bool compute_hash)
2604{
2605  vn_nary_op_s **slot;
2606
2607  if (compute_hash)
2608    vno->hashcode = vn_nary_op_compute_hash (vno);
2609
2610  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2611  gcc_assert (!*slot);
2612
2613  *slot = vno;
2614  return vno;
2615}
2616
2617/* Insert a n-ary operation into the current hash table using it's
2618   pieces.  Return the vn_nary_op_t structure we created and put in
2619   the hashtable.  */
2620
2621vn_nary_op_t
2622vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2623			  tree type, tree *ops,
2624			  tree result, unsigned int value_id)
2625{
2626  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2627  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2628  return vn_nary_op_insert_into (vno1, current_info->nary, true);
2629}
2630
2631/* Insert OP into the current hash table with a value number of
2632   RESULT.  Return the vn_nary_op_t structure we created and put in
2633   the hashtable.  */
2634
2635vn_nary_op_t
2636vn_nary_op_insert (tree op, tree result)
2637{
2638  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2639  vn_nary_op_t vno1;
2640
2641  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2642  init_vn_nary_op_from_op (vno1, op);
2643  return vn_nary_op_insert_into (vno1, current_info->nary, true);
2644}
2645
2646/* Insert the rhs of STMT into the current hash table with a value number of
2647   RESULT.  */
2648
2649vn_nary_op_t
2650vn_nary_op_insert_stmt (gimple stmt, tree result)
2651{
2652  vn_nary_op_t vno1
2653    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2654			result, VN_INFO (result)->value_id);
2655  init_vn_nary_op_from_stmt (vno1, stmt);
2656  return vn_nary_op_insert_into (vno1, current_info->nary, true);
2657}
2658
2659/* Compute a hashcode for PHI operation VP1 and return it.  */
2660
2661static inline hashval_t
2662vn_phi_compute_hash (vn_phi_t vp1)
2663{
2664  inchash::hash hstate (vp1->block->index);
2665  int i;
2666  tree phi1op;
2667  tree type;
2668
2669  /* If all PHI arguments are constants we need to distinguish
2670     the PHI node via its type.  */
2671  type = vp1->type;
2672  hstate.merge_hash (vn_hash_type (type));
2673
2674  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2675    {
2676      if (phi1op == VN_TOP)
2677	continue;
2678      inchash::add_expr (phi1op, hstate);
2679    }
2680
2681  return hstate.end ();
2682}
2683
2684/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
2685
2686static int
2687vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2688{
2689  if (vp1->hashcode != vp2->hashcode)
2690    return false;
2691
2692  if (vp1->block == vp2->block)
2693    {
2694      int i;
2695      tree phi1op;
2696
2697      /* If the PHI nodes do not have compatible types
2698	 they are not the same.  */
2699      if (!types_compatible_p (vp1->type, vp2->type))
2700	return false;
2701
2702      /* Any phi in the same block will have it's arguments in the
2703	 same edge order, because of how we store phi nodes.  */
2704      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2705	{
2706	  tree phi2op = vp2->phiargs[i];
2707	  if (phi1op == VN_TOP || phi2op == VN_TOP)
2708	    continue;
2709	  if (!expressions_equal_p (phi1op, phi2op))
2710	    return false;
2711	}
2712      return true;
2713    }
2714  return false;
2715}
2716
2717static vec<tree> shared_lookup_phiargs;
2718
2719/* Lookup PHI in the current hash table, and return the resulting
2720   value number if it exists in the hash table.  Return NULL_TREE if
2721   it does not exist in the hash table. */
2722
2723static tree
2724vn_phi_lookup (gimple phi)
2725{
2726  vn_phi_s **slot;
2727  struct vn_phi_s vp1;
2728  unsigned i;
2729
2730  shared_lookup_phiargs.truncate (0);
2731
2732  /* Canonicalize the SSA_NAME's to their value number.  */
2733  for (i = 0; i < gimple_phi_num_args (phi); i++)
2734    {
2735      tree def = PHI_ARG_DEF (phi, i);
2736      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2737      shared_lookup_phiargs.safe_push (def);
2738    }
2739  vp1.type = TREE_TYPE (gimple_phi_result (phi));
2740  vp1.phiargs = shared_lookup_phiargs;
2741  vp1.block = gimple_bb (phi);
2742  vp1.hashcode = vn_phi_compute_hash (&vp1);
2743  slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2744						  NO_INSERT);
2745  if (!slot && current_info == optimistic_info)
2746    slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2747						  NO_INSERT);
2748  if (!slot)
2749    return NULL_TREE;
2750  return (*slot)->result;
2751}
2752
2753/* Insert PHI into the current hash table with a value number of
2754   RESULT.  */
2755
2756static vn_phi_t
2757vn_phi_insert (gimple phi, tree result)
2758{
2759  vn_phi_s **slot;
2760  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2761  unsigned i;
2762  vec<tree> args = vNULL;
2763
2764  /* Canonicalize the SSA_NAME's to their value number.  */
2765  for (i = 0; i < gimple_phi_num_args (phi); i++)
2766    {
2767      tree def = PHI_ARG_DEF (phi, i);
2768      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2769      args.safe_push (def);
2770    }
2771  vp1->value_id = VN_INFO (result)->value_id;
2772  vp1->type = TREE_TYPE (gimple_phi_result (phi));
2773  vp1->phiargs = args;
2774  vp1->block = gimple_bb (phi);
2775  vp1->result = result;
2776  vp1->hashcode = vn_phi_compute_hash (vp1);
2777
2778  slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2779
2780  /* Because we iterate over phi operations more than once, it's
2781     possible the slot might already exist here, hence no assert.*/
2782  *slot = vp1;
2783  return vp1;
2784}
2785
2786
2787/* Print set of components in strongly connected component SCC to OUT. */
2788
2789static void
2790print_scc (FILE *out, vec<tree> scc)
2791{
2792  tree var;
2793  unsigned int i;
2794
2795  fprintf (out, "SCC consists of:");
2796  FOR_EACH_VEC_ELT (scc, i, var)
2797    {
2798      fprintf (out, " ");
2799      print_generic_expr (out, var, 0);
2800    }
2801  fprintf (out, "\n");
2802}
2803
2804/* Set the value number of FROM to TO, return true if it has changed
2805   as a result.  */
2806
2807static inline bool
2808set_ssa_val_to (tree from, tree to)
2809{
2810  tree currval = SSA_VAL (from);
2811  HOST_WIDE_INT toff, coff;
2812
2813  /* The only thing we allow as value numbers are ssa_names
2814     and invariants.  So assert that here.  We don't allow VN_TOP
2815     as visiting a stmt should produce a value-number other than
2816     that.
2817     ???  Still VN_TOP can happen for unreachable code, so force
2818     it to varying in that case.  Not all code is prepared to
2819     get VN_TOP on valueization.  */
2820  if (to == VN_TOP)
2821    {
2822      if (dump_file && (dump_flags & TDF_DETAILS))
2823	fprintf (dump_file, "Forcing value number to varying on "
2824		 "receiving VN_TOP\n");
2825      to = from;
2826    }
2827
2828  gcc_assert (to != NULL_TREE
2829	      && ((TREE_CODE (to) == SSA_NAME
2830		   && (to == from || SSA_VAL (to) == to))
2831		  || is_gimple_min_invariant (to)));
2832
2833  if (from != to)
2834    {
2835      if (currval == from)
2836	{
2837	  if (dump_file && (dump_flags & TDF_DETAILS))
2838	    {
2839	      fprintf (dump_file, "Not changing value number of ");
2840	      print_generic_expr (dump_file, from, 0);
2841	      fprintf (dump_file, " from VARYING to ");
2842	      print_generic_expr (dump_file, to, 0);
2843	      fprintf (dump_file, "\n");
2844	    }
2845	  return false;
2846	}
2847      else if (TREE_CODE (to) == SSA_NAME
2848	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2849	to = from;
2850    }
2851
2852  if (dump_file && (dump_flags & TDF_DETAILS))
2853    {
2854      fprintf (dump_file, "Setting value number of ");
2855      print_generic_expr (dump_file, from, 0);
2856      fprintf (dump_file, " to ");
2857      print_generic_expr (dump_file, to, 0);
2858    }
2859
2860  if (currval != to
2861      && !operand_equal_p (currval, to, 0)
2862      /* ???  For addresses involving volatile objects or types operand_equal_p
2863         does not reliably detect ADDR_EXPRs as equal.  We know we are only
2864	 getting invariant gimple addresses here, so can use
2865	 get_addr_base_and_unit_offset to do this comparison.  */
2866      && !(TREE_CODE (currval) == ADDR_EXPR
2867	   && TREE_CODE (to) == ADDR_EXPR
2868	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2869	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2870	   && coff == toff))
2871    {
2872      VN_INFO (from)->valnum = to;
2873      if (dump_file && (dump_flags & TDF_DETAILS))
2874	fprintf (dump_file, " (changed)\n");
2875      return true;
2876    }
2877  if (dump_file && (dump_flags & TDF_DETAILS))
2878    fprintf (dump_file, "\n");
2879  return false;
2880}
2881
2882/* Mark as processed all the definitions in the defining stmt of USE, or
2883   the USE itself.  */
2884
2885static void
2886mark_use_processed (tree use)
2887{
2888  ssa_op_iter iter;
2889  def_operand_p defp;
2890  gimple stmt = SSA_NAME_DEF_STMT (use);
2891
2892  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2893    {
2894      VN_INFO (use)->use_processed = true;
2895      return;
2896    }
2897
2898  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2899    {
2900      tree def = DEF_FROM_PTR (defp);
2901
2902      VN_INFO (def)->use_processed = true;
2903    }
2904}
2905
2906/* Set all definitions in STMT to value number to themselves.
2907   Return true if a value number changed. */
2908
2909static bool
2910defs_to_varying (gimple stmt)
2911{
2912  bool changed = false;
2913  ssa_op_iter iter;
2914  def_operand_p defp;
2915
2916  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2917    {
2918      tree def = DEF_FROM_PTR (defp);
2919      changed |= set_ssa_val_to (def, def);
2920    }
2921  return changed;
2922}
2923
2924static bool expr_has_constants (tree expr);
2925
2926/* Visit a copy between LHS and RHS, return true if the value number
2927   changed.  */
2928
2929static bool
2930visit_copy (tree lhs, tree rhs)
2931{
2932  /* The copy may have a more interesting constant filled expression
2933     (we don't, since we know our RHS is just an SSA name).  */
2934  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2935  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2936
2937  /* And finally valueize.  */
2938  rhs = SSA_VAL (rhs);
2939
2940  return set_ssa_val_to (lhs, rhs);
2941}
2942
2943/* Visit a nary operator RHS, value number it, and return true if the
2944   value number of LHS has changed as a result.  */
2945
2946static bool
2947visit_nary_op (tree lhs, gimple stmt)
2948{
2949  bool changed = false;
2950  tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2951
2952  if (result)
2953    changed = set_ssa_val_to (lhs, result);
2954  else
2955    {
2956      changed = set_ssa_val_to (lhs, lhs);
2957      vn_nary_op_insert_stmt (stmt, lhs);
2958    }
2959
2960  return changed;
2961}
2962
2963/* Visit a call STMT storing into LHS.  Return true if the value number
2964   of the LHS has changed as a result.  */
2965
2966static bool
2967visit_reference_op_call (tree lhs, gcall *stmt)
2968{
2969  bool changed = false;
2970  struct vn_reference_s vr1;
2971  vn_reference_t vnresult = NULL;
2972  tree vdef = gimple_vdef (stmt);
2973
2974  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
2975  if (lhs && TREE_CODE (lhs) != SSA_NAME)
2976    lhs = NULL_TREE;
2977
2978  vn_reference_lookup_call (stmt, &vnresult, &vr1);
2979  if (vnresult)
2980    {
2981      if (vnresult->result_vdef && vdef)
2982	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2983
2984      if (!vnresult->result && lhs)
2985	vnresult->result = lhs;
2986
2987      if (vnresult->result && lhs)
2988	{
2989	  changed |= set_ssa_val_to (lhs, vnresult->result);
2990
2991	  if (VN_INFO (vnresult->result)->has_constants)
2992	    VN_INFO (lhs)->has_constants = true;
2993	}
2994    }
2995  else
2996    {
2997      vn_reference_t vr2;
2998      vn_reference_s **slot;
2999      if (vdef)
3000	changed |= set_ssa_val_to (vdef, vdef);
3001      if (lhs)
3002	changed |= set_ssa_val_to (lhs, lhs);
3003      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
3004      vr2->vuse = vr1.vuse;
3005      /* As we are not walking the virtual operand chain we know the
3006	 shared_lookup_references are still original so we can re-use
3007	 them here.  */
3008      vr2->operands = vr1.operands.copy ();
3009      vr2->type = vr1.type;
3010      vr2->set = vr1.set;
3011      vr2->hashcode = vr1.hashcode;
3012      vr2->result = lhs;
3013      vr2->result_vdef = vdef;
3014      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3015							    INSERT);
3016      gcc_assert (!*slot);
3017      *slot = vr2;
3018    }
3019
3020  return changed;
3021}
3022
3023/* Visit a load from a reference operator RHS, part of STMT, value number it,
3024   and return true if the value number of the LHS has changed as a result.  */
3025
3026static bool
3027visit_reference_op_load (tree lhs, tree op, gimple stmt)
3028{
3029  bool changed = false;
3030  tree last_vuse;
3031  tree result;
3032
3033  last_vuse = gimple_vuse (stmt);
3034  last_vuse_ptr = &last_vuse;
3035  result = vn_reference_lookup (op, gimple_vuse (stmt),
3036				default_vn_walk_kind, NULL, true);
3037  last_vuse_ptr = NULL;
3038
3039  /* We handle type-punning through unions by value-numbering based
3040     on offset and size of the access.  Be prepared to handle a
3041     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
3042  if (result
3043      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3044    {
3045      /* We will be setting the value number of lhs to the value number
3046	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3047	 So first simplify and lookup this expression to see if it
3048	 is already available.  */
3049      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3050      if ((CONVERT_EXPR_P (val)
3051	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3052	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3053        {
3054	  tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3055	  if ((CONVERT_EXPR_P (tem)
3056	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3057	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3058						    TREE_TYPE (val), tem)))
3059	    val = tem;
3060	}
3061      result = val;
3062      if (!is_gimple_min_invariant (val)
3063	  && TREE_CODE (val) != SSA_NAME)
3064	result = vn_nary_op_lookup (val, NULL);
3065      /* If the expression is not yet available, value-number lhs to
3066	 a new SSA_NAME we create.  */
3067      if (!result)
3068        {
3069	  result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3070				       "vntemp");
3071	  /* Initialize value-number information properly.  */
3072	  VN_INFO_GET (result)->valnum = result;
3073	  VN_INFO (result)->value_id = get_next_value_id ();
3074	  VN_INFO (result)->expr = val;
3075	  VN_INFO (result)->has_constants = expr_has_constants (val);
3076	  VN_INFO (result)->needs_insertion = true;
3077	  /* As all "inserted" statements are singleton SCCs, insert
3078	     to the valid table.  This is strictly needed to
3079	     avoid re-generating new value SSA_NAMEs for the same
3080	     expression during SCC iteration over and over (the
3081	     optimistic table gets cleared after each iteration).
3082	     We do not need to insert into the optimistic table, as
3083	     lookups there will fall back to the valid table.  */
3084	  if (current_info == optimistic_info)
3085	    {
3086	      current_info = valid_info;
3087	      vn_nary_op_insert (val, result);
3088	      current_info = optimistic_info;
3089	    }
3090	  else
3091	    vn_nary_op_insert (val, result);
3092	  if (dump_file && (dump_flags & TDF_DETAILS))
3093	    {
3094	      fprintf (dump_file, "Inserting name ");
3095	      print_generic_expr (dump_file, result, 0);
3096	      fprintf (dump_file, " for expression ");
3097	      print_generic_expr (dump_file, val, 0);
3098	      fprintf (dump_file, "\n");
3099	    }
3100	}
3101    }
3102
3103  if (result)
3104    {
3105      changed = set_ssa_val_to (lhs, result);
3106      if (TREE_CODE (result) == SSA_NAME
3107	  && VN_INFO (result)->has_constants)
3108	{
3109	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3110	  VN_INFO (lhs)->has_constants = true;
3111	}
3112    }
3113  else
3114    {
3115      changed = set_ssa_val_to (lhs, lhs);
3116      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3117    }
3118
3119  return changed;
3120}
3121
3122
3123/* Visit a store to a reference operator LHS, part of STMT, value number it,
3124   and return true if the value number of the LHS has changed as a result.  */
3125
3126static bool
3127visit_reference_op_store (tree lhs, tree op, gimple stmt)
3128{
3129  bool changed = false;
3130  vn_reference_t vnresult = NULL;
3131  tree result, assign;
3132  bool resultsame = false;
3133  tree vuse = gimple_vuse (stmt);
3134  tree vdef = gimple_vdef (stmt);
3135
3136  if (TREE_CODE (op) == SSA_NAME)
3137    op = SSA_VAL (op);
3138
3139  /* First we want to lookup using the *vuses* from the store and see
3140     if there the last store to this location with the same address
3141     had the same value.
3142
3143     The vuses represent the memory state before the store.  If the
3144     memory state, address, and value of the store is the same as the
3145     last store to this location, then this store will produce the
3146     same memory state as that store.
3147
3148     In this case the vdef versions for this store are value numbered to those
3149     vuse versions, since they represent the same memory state after
3150     this store.
3151
3152     Otherwise, the vdefs for the store are used when inserting into
3153     the table, since the store generates a new memory state.  */
3154
3155  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);
3156
3157  if (result)
3158    {
3159      if (TREE_CODE (result) == SSA_NAME)
3160	result = SSA_VAL (result);
3161      resultsame = expressions_equal_p (result, op);
3162    }
3163
3164  if ((!result || !resultsame)
3165      /* Only perform the following when being called from PRE
3166	 which embeds tail merging.  */
3167      && default_vn_walk_kind == VN_WALK)
3168    {
3169      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3170      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3171      if (vnresult)
3172	{
3173	  VN_INFO (vdef)->use_processed = true;
3174	  return set_ssa_val_to (vdef, vnresult->result_vdef);
3175	}
3176    }
3177
3178  if (!result || !resultsame)
3179    {
3180      if (dump_file && (dump_flags & TDF_DETAILS))
3181	{
3182	  fprintf (dump_file, "No store match\n");
3183	  fprintf (dump_file, "Value numbering store ");
3184	  print_generic_expr (dump_file, lhs, 0);
3185	  fprintf (dump_file, " to ");
3186	  print_generic_expr (dump_file, op, 0);
3187	  fprintf (dump_file, "\n");
3188	}
3189      /* Have to set value numbers before insert, since insert is
3190	 going to valueize the references in-place.  */
3191      if (vdef)
3192	{
3193	  changed |= set_ssa_val_to (vdef, vdef);
3194	}
3195
3196      /* Do not insert structure copies into the tables.  */
3197      if (is_gimple_min_invariant (op)
3198	  || is_gimple_reg (op))
3199        vn_reference_insert (lhs, op, vdef, NULL);
3200
3201      /* Only perform the following when being called from PRE
3202	 which embeds tail merging.  */
3203      if (default_vn_walk_kind == VN_WALK)
3204	{
3205	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3206	  vn_reference_insert (assign, lhs, vuse, vdef);
3207	}
3208    }
3209  else
3210    {
3211      /* We had a match, so value number the vdef to have the value
3212	 number of the vuse it came from.  */
3213
3214      if (dump_file && (dump_flags & TDF_DETAILS))
3215	fprintf (dump_file, "Store matched earlier value,"
3216		 "value numbering store vdefs to matching vuses.\n");
3217
3218      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3219    }
3220
3221  return changed;
3222}
3223
3224/* Visit and value number PHI, return true if the value number
3225   changed.  */
3226
3227static bool
3228visit_phi (gimple phi)
3229{
3230  bool changed = false;
3231  tree result;
3232  tree sameval = VN_TOP;
3233  bool allsame = true;
3234
3235  /* TODO: We could check for this in init_sccvn, and replace this
3236     with a gcc_assert.  */
3237  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3238    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3239
3240  /* See if all non-TOP arguments have the same value.  TOP is
3241     equivalent to everything, so we can ignore it.  */
3242  edge_iterator ei;
3243  edge e;
3244  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3245    if (e->flags & EDGE_EXECUTABLE)
3246      {
3247	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3248
3249	if (TREE_CODE (def) == SSA_NAME)
3250	  def = SSA_VAL (def);
3251	if (def == VN_TOP)
3252	  continue;
3253	if (sameval == VN_TOP)
3254	  {
3255	    sameval = def;
3256	  }
3257	else
3258	  {
3259	    if (!expressions_equal_p (def, sameval))
3260	      {
3261		allsame = false;
3262		break;
3263	      }
3264	  }
3265      }
3266
3267  /* If all value numbered to the same value, the phi node has that
3268     value.  */
3269  if (allsame)
3270    return set_ssa_val_to (PHI_RESULT (phi), sameval);
3271
3272  /* Otherwise, see if it is equivalent to a phi node in this block.  */
3273  result = vn_phi_lookup (phi);
3274  if (result)
3275    changed = set_ssa_val_to (PHI_RESULT (phi), result);
3276  else
3277    {
3278      vn_phi_insert (phi, PHI_RESULT (phi));
3279      VN_INFO (PHI_RESULT (phi))->has_constants = false;
3280      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3281      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3282    }
3283
3284  return changed;
3285}
3286
3287/* Return true if EXPR contains constants.  */
3288
3289static bool
3290expr_has_constants (tree expr)
3291{
3292  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3293    {
3294    case tcc_unary:
3295      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3296
3297    case tcc_binary:
3298      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3299	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3300      /* Constants inside reference ops are rarely interesting, but
3301	 it can take a lot of looking to find them.  */
3302    case tcc_reference:
3303    case tcc_declaration:
3304      return false;
3305    default:
3306      return is_gimple_min_invariant (expr);
3307    }
3308  return false;
3309}
3310
3311/* Return true if STMT contains constants.  */
3312
3313static bool
3314stmt_has_constants (gimple stmt)
3315{
3316  tree tem;
3317
3318  if (gimple_code (stmt) != GIMPLE_ASSIGN)
3319    return false;
3320
3321  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3322    {
3323    case GIMPLE_TERNARY_RHS:
3324      tem = gimple_assign_rhs3 (stmt);
3325      if (TREE_CODE (tem) == SSA_NAME)
3326	tem = SSA_VAL (tem);
3327      if (is_gimple_min_invariant (tem))
3328	return true;
3329      /* Fallthru.  */
3330
3331    case GIMPLE_BINARY_RHS:
3332      tem = gimple_assign_rhs2 (stmt);
3333      if (TREE_CODE (tem) == SSA_NAME)
3334	tem = SSA_VAL (tem);
3335      if (is_gimple_min_invariant (tem))
3336	return true;
3337      /* Fallthru.  */
3338
3339    case GIMPLE_SINGLE_RHS:
3340      /* Constants inside reference ops are rarely interesting, but
3341	 it can take a lot of looking to find them.  */
3342    case GIMPLE_UNARY_RHS:
3343      tem = gimple_assign_rhs1 (stmt);
3344      if (TREE_CODE (tem) == SSA_NAME)
3345	tem = SSA_VAL (tem);
3346      return is_gimple_min_invariant (tem);
3347
3348    default:
3349      gcc_unreachable ();
3350    }
3351  return false;
3352}
3353
3354/* Simplify the binary expression RHS, and return the result if
3355   simplified. */
3356
3357static tree
3358simplify_binary_expression (gimple stmt)
3359{
3360  tree result = NULL_TREE;
3361  tree op0 = gimple_assign_rhs1 (stmt);
3362  tree op1 = gimple_assign_rhs2 (stmt);
3363  enum tree_code code = gimple_assign_rhs_code (stmt);
3364
3365  /* This will not catch every single case we could combine, but will
3366     catch those with constants.  The goal here is to simultaneously
3367     combine constants between expressions, but avoid infinite
3368     expansion of expressions during simplification.  */
3369  op0 = vn_valueize (op0);
3370  if (TREE_CODE (op0) == SSA_NAME
3371      && (VN_INFO (op0)->has_constants
3372	  || TREE_CODE_CLASS (code) == tcc_comparison
3373	  || code == COMPLEX_EXPR))
3374    op0 = vn_get_expr_for (op0);
3375
3376  op1 = vn_valueize (op1);
3377  if (TREE_CODE (op1) == SSA_NAME
3378      && (VN_INFO (op1)->has_constants
3379	  || code == COMPLEX_EXPR))
3380    op1 = vn_get_expr_for (op1);
3381
3382  /* Pointer plus constant can be represented as invariant address.
3383     Do so to allow further propatation, see also tree forwprop.  */
3384  if (code == POINTER_PLUS_EXPR
3385      && tree_fits_uhwi_p (op1)
3386      && TREE_CODE (op0) == ADDR_EXPR
3387      && is_gimple_min_invariant (op0))
3388    return build_invariant_address (TREE_TYPE (op0),
3389				    TREE_OPERAND (op0, 0),
3390				    tree_to_uhwi (op1));
3391
3392  /* Avoid folding if nothing changed.  */
3393  if (op0 == gimple_assign_rhs1 (stmt)
3394      && op1 == gimple_assign_rhs2 (stmt))
3395    return NULL_TREE;
3396
3397  fold_defer_overflow_warnings ();
3398
3399  result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3400  if (result)
3401    STRIP_USELESS_TYPE_CONVERSION (result);
3402
3403  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3404				  stmt, 0);
3405
3406  /* Make sure result is not a complex expression consisting
3407     of operators of operators (IE (a + b) + (a + c))
3408     Otherwise, we will end up with unbounded expressions if
3409     fold does anything at all.  */
3410  if (result && valid_gimple_rhs_p (result))
3411    return result;
3412
3413  return NULL_TREE;
3414}
3415
3416/* Simplify the unary expression RHS, and return the result if
3417   simplified. */
3418
3419static tree
3420simplify_unary_expression (gassign *stmt)
3421{
3422  tree result = NULL_TREE;
3423  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3424  enum tree_code code = gimple_assign_rhs_code (stmt);
3425
3426  /* We handle some tcc_reference codes here that are all
3427     GIMPLE_ASSIGN_SINGLE codes.  */
3428  if (code == REALPART_EXPR
3429      || code == IMAGPART_EXPR
3430      || code == VIEW_CONVERT_EXPR
3431      || code == BIT_FIELD_REF)
3432    op0 = TREE_OPERAND (op0, 0);
3433
3434  orig_op0 = op0;
3435  op0 = vn_valueize (op0);
3436  if (TREE_CODE (op0) == SSA_NAME)
3437    {
3438      if (VN_INFO (op0)->has_constants)
3439	op0 = vn_get_expr_for (op0);
3440      else if (CONVERT_EXPR_CODE_P (code)
3441	       || code == REALPART_EXPR
3442	       || code == IMAGPART_EXPR
3443	       || code == VIEW_CONVERT_EXPR
3444	       || code == BIT_FIELD_REF)
3445	{
3446	  /* We want to do tree-combining on conversion-like expressions.
3447	     Make sure we feed only SSA_NAMEs or constants to fold though.  */
3448	  tree tem = vn_get_expr_for (op0);
3449	  if (UNARY_CLASS_P (tem)
3450	      || BINARY_CLASS_P (tem)
3451	      || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3452	      || TREE_CODE (tem) == SSA_NAME
3453	      || TREE_CODE (tem) == CONSTRUCTOR
3454	      || is_gimple_min_invariant (tem))
3455	    op0 = tem;
3456	}
3457    }
3458
3459  /* Avoid folding if nothing changed, but remember the expression.  */
3460  if (op0 == orig_op0)
3461    return NULL_TREE;
3462
3463  if (code == BIT_FIELD_REF)
3464    {
3465      tree rhs = gimple_assign_rhs1 (stmt);
3466      result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3467			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3468    }
3469  else
3470    result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3471  if (result)
3472    {
3473      STRIP_USELESS_TYPE_CONVERSION (result);
3474      if (valid_gimple_rhs_p (result))
3475        return result;
3476    }
3477
3478  return NULL_TREE;
3479}
3480
3481/* Try to simplify RHS using equivalences and constant folding.  */
3482
3483static tree
3484try_to_simplify (gassign *stmt)
3485{
3486  enum tree_code code = gimple_assign_rhs_code (stmt);
3487  tree tem;
3488
3489  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
3490     in this case, there is no point in doing extra work.  */
3491  if (code == SSA_NAME)
3492    return NULL_TREE;
3493
3494  /* First try constant folding based on our current lattice.  */
3495  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3496  if (tem
3497      && (TREE_CODE (tem) == SSA_NAME
3498	  || is_gimple_min_invariant (tem)))
3499    return tem;
3500
3501  /* If that didn't work try combining multiple statements.  */
3502  switch (TREE_CODE_CLASS (code))
3503    {
3504    case tcc_reference:
3505      /* Fallthrough for some unary codes that can operate on registers.  */
3506      if (!(code == REALPART_EXPR
3507	    || code == IMAGPART_EXPR
3508	    || code == VIEW_CONVERT_EXPR
3509	    || code == BIT_FIELD_REF))
3510	break;
3511      /* We could do a little more with unary ops, if they expand
3512	 into binary ops, but it's debatable whether it is worth it. */
3513    case tcc_unary:
3514      return simplify_unary_expression (stmt);
3515
3516    case tcc_comparison:
3517    case tcc_binary:
3518      return simplify_binary_expression (stmt);
3519
3520    default:
3521      break;
3522    }
3523
3524  return NULL_TREE;
3525}
3526
3527/* Visit and value number USE, return true if the value number
3528   changed. */
3529
3530static bool
3531visit_use (tree use)
3532{
3533  bool changed = false;
3534  gimple stmt = SSA_NAME_DEF_STMT (use);
3535
3536  mark_use_processed (use);
3537
3538  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3539  if (dump_file && (dump_flags & TDF_DETAILS)
3540      && !SSA_NAME_IS_DEFAULT_DEF (use))
3541    {
3542      fprintf (dump_file, "Value numbering ");
3543      print_generic_expr (dump_file, use, 0);
3544      fprintf (dump_file, " stmt = ");
3545      print_gimple_stmt (dump_file, stmt, 0, 0);
3546    }
3547
3548  /* Handle uninitialized uses.  */
3549  if (SSA_NAME_IS_DEFAULT_DEF (use))
3550    changed = set_ssa_val_to (use, use);
3551  else
3552    {
3553      if (gimple_code (stmt) == GIMPLE_PHI)
3554	changed = visit_phi (stmt);
3555      else if (gimple_has_volatile_ops (stmt))
3556	changed = defs_to_varying (stmt);
3557      else if (is_gimple_assign (stmt))
3558	{
3559	  enum tree_code code = gimple_assign_rhs_code (stmt);
3560	  tree lhs = gimple_assign_lhs (stmt);
3561	  tree rhs1 = gimple_assign_rhs1 (stmt);
3562	  tree simplified;
3563
3564	  /* Shortcut for copies. Simplifying copies is pointless,
3565	     since we copy the expression and value they represent.  */
3566	  if (code == SSA_NAME
3567	      && TREE_CODE (lhs) == SSA_NAME)
3568	    {
3569	      changed = visit_copy (lhs, rhs1);
3570	      goto done;
3571	    }
3572	  simplified = try_to_simplify (as_a <gassign *> (stmt));
3573	  if (simplified)
3574	    {
3575	      if (dump_file && (dump_flags & TDF_DETAILS))
3576		{
3577		  fprintf (dump_file, "RHS ");
3578		  print_gimple_expr (dump_file, stmt, 0, 0);
3579		  fprintf (dump_file, " simplified to ");
3580		  print_generic_expr (dump_file, simplified, 0);
3581		  if (TREE_CODE (lhs) == SSA_NAME)
3582		    fprintf (dump_file, " has constants %d\n",
3583			     expr_has_constants (simplified));
3584		  else
3585		    fprintf (dump_file, "\n");
3586		}
3587	    }
3588	  /* Setting value numbers to constants will occasionally
3589	     screw up phi congruence because constants are not
3590	     uniquely associated with a single ssa name that can be
3591	     looked up.  */
3592	  if (simplified
3593	      && is_gimple_min_invariant (simplified)
3594	      && TREE_CODE (lhs) == SSA_NAME)
3595	    {
3596	      VN_INFO (lhs)->expr = simplified;
3597	      VN_INFO (lhs)->has_constants = true;
3598	      changed = set_ssa_val_to (lhs, simplified);
3599	      goto done;
3600	    }
3601	  else if (simplified
3602		   && TREE_CODE (simplified) == SSA_NAME
3603		   && TREE_CODE (lhs) == SSA_NAME)
3604	    {
3605	      changed = visit_copy (lhs, simplified);
3606	      goto done;
3607	    }
3608	  else if (simplified)
3609	    {
3610	      if (TREE_CODE (lhs) == SSA_NAME)
3611		{
3612		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3613		  /* We have to unshare the expression or else
3614		     valuizing may change the IL stream.  */
3615		  VN_INFO (lhs)->expr = unshare_expr (simplified);
3616		}
3617	    }
3618	  else if (stmt_has_constants (stmt)
3619		   && TREE_CODE (lhs) == SSA_NAME)
3620	    VN_INFO (lhs)->has_constants = true;
3621	  else if (TREE_CODE (lhs) == SSA_NAME)
3622	    {
3623	      /* We reset expr and constantness here because we may
3624		 have been value numbering optimistically, and
3625		 iterating. They may become non-constant in this case,
3626		 even if they were optimistically constant. */
3627
3628	      VN_INFO (lhs)->has_constants = false;
3629	      VN_INFO (lhs)->expr = NULL_TREE;
3630	    }
3631
3632	  if ((TREE_CODE (lhs) == SSA_NAME
3633	       /* We can substitute SSA_NAMEs that are live over
3634		  abnormal edges with their constant value.  */
3635	       && !(gimple_assign_copy_p (stmt)
3636		    && is_gimple_min_invariant (rhs1))
3637	       && !(simplified
3638		    && is_gimple_min_invariant (simplified))
3639	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3640	      /* Stores or copies from SSA_NAMEs that are live over
3641		 abnormal edges are a problem.  */
3642	      || (code == SSA_NAME
3643		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3644	    changed = defs_to_varying (stmt);
3645	  else if (REFERENCE_CLASS_P (lhs)
3646		   || DECL_P (lhs))
3647	    changed = visit_reference_op_store (lhs, rhs1, stmt);
3648	  else if (TREE_CODE (lhs) == SSA_NAME)
3649	    {
3650	      if ((gimple_assign_copy_p (stmt)
3651		   && is_gimple_min_invariant (rhs1))
3652		  || (simplified
3653		      && is_gimple_min_invariant (simplified)))
3654		{
3655		  VN_INFO (lhs)->has_constants = true;
3656		  if (simplified)
3657		    changed = set_ssa_val_to (lhs, simplified);
3658		  else
3659		    changed = set_ssa_val_to (lhs, rhs1);
3660		}
3661	      else
3662		{
3663		  /* First try to lookup the simplified expression.  */
3664		  if (simplified)
3665		    {
3666		      enum gimple_rhs_class rhs_class;
3667
3668
3669		      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3670		      if ((rhs_class == GIMPLE_UNARY_RHS
3671			   || rhs_class == GIMPLE_BINARY_RHS
3672			   || rhs_class == GIMPLE_TERNARY_RHS)
3673			  && valid_gimple_rhs_p (simplified))
3674			{
3675			  tree result = vn_nary_op_lookup (simplified, NULL);
3676			  if (result)
3677			    {
3678			      changed = set_ssa_val_to (lhs, result);
3679			      goto done;
3680			    }
3681			}
3682		    }
3683
3684		  /* Otherwise visit the original statement.  */
3685		  switch (vn_get_stmt_kind (stmt))
3686		    {
3687		    case VN_NARY:
3688		      changed = visit_nary_op (lhs, stmt);
3689		      break;
3690		    case VN_REFERENCE:
3691		      changed = visit_reference_op_load (lhs, rhs1, stmt);
3692		      break;
3693		    default:
3694		      changed = defs_to_varying (stmt);
3695		      break;
3696		    }
3697		}
3698	    }
3699	  else
3700	    changed = defs_to_varying (stmt);
3701	}
3702      else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3703	{
3704	  tree lhs = gimple_call_lhs (stmt);
3705	  if (lhs && TREE_CODE (lhs) == SSA_NAME)
3706	    {
3707	      /* Try constant folding based on our current lattice.  */
3708	      tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3709								vn_valueize);
3710	      if (simplified)
3711		{
3712		  if (dump_file && (dump_flags & TDF_DETAILS))
3713		    {
3714		      fprintf (dump_file, "call ");
3715		      print_gimple_expr (dump_file, stmt, 0, 0);
3716		      fprintf (dump_file, " simplified to ");
3717		      print_generic_expr (dump_file, simplified, 0);
3718		      if (TREE_CODE (lhs) == SSA_NAME)
3719			fprintf (dump_file, " has constants %d\n",
3720				 expr_has_constants (simplified));
3721		      else
3722			fprintf (dump_file, "\n");
3723		    }
3724		}
3725	      /* Setting value numbers to constants will occasionally
3726		 screw up phi congruence because constants are not
3727		 uniquely associated with a single ssa name that can be
3728		 looked up.  */
3729	      if (simplified
3730		  && is_gimple_min_invariant (simplified))
3731		{
3732		  VN_INFO (lhs)->expr = simplified;
3733		  VN_INFO (lhs)->has_constants = true;
3734		  changed = set_ssa_val_to (lhs, simplified);
3735		  if (gimple_vdef (stmt))
3736		    changed |= set_ssa_val_to (gimple_vdef (stmt),
3737					       SSA_VAL (gimple_vuse (stmt)));
3738		  goto done;
3739		}
3740	      else if (simplified
3741		       && TREE_CODE (simplified) == SSA_NAME)
3742		{
3743		  changed = visit_copy (lhs, simplified);
3744		  if (gimple_vdef (stmt))
3745		    changed |= set_ssa_val_to (gimple_vdef (stmt),
3746					       SSA_VAL (gimple_vuse (stmt)));
3747		  goto done;
3748		}
3749	      else
3750		{
3751		  if (stmt_has_constants (stmt))
3752		    VN_INFO (lhs)->has_constants = true;
3753		  else
3754		    {
3755		      /* We reset expr and constantness here because we may
3756			 have been value numbering optimistically, and
3757			 iterating.  They may become non-constant in this case,
3758			 even if they were optimistically constant.  */
3759		      VN_INFO (lhs)->has_constants = false;
3760		      VN_INFO (lhs)->expr = NULL_TREE;
3761		    }
3762
3763		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3764		    {
3765		      changed = defs_to_varying (stmt);
3766		      goto done;
3767		    }
3768		}
3769	    }
3770
3771	  if (!gimple_call_internal_p (stmt)
3772	      && (/* Calls to the same function with the same vuse
3773		     and the same operands do not necessarily return the same
3774		     value, unless they're pure or const.  */
3775		  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3776		  /* If calls have a vdef, subsequent calls won't have
3777		     the same incoming vuse.  So, if 2 calls with vdef have the
3778		     same vuse, we know they're not subsequent.
3779		     We can value number 2 calls to the same function with the
3780		     same vuse and the same operands which are not subsequent
3781		     the same, because there is no code in the program that can
3782		     compare the 2 values...  */
3783		  || (gimple_vdef (stmt)
3784		      /* ... unless the call returns a pointer which does
3785		         not alias with anything else.  In which case the
3786			 information that the values are distinct are encoded
3787			 in the IL.  */
3788		      && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3789		      /* Only perform the following when being called from PRE
3790			 which embeds tail merging.  */
3791		      && default_vn_walk_kind == VN_WALK)))
3792	    changed = visit_reference_op_call (lhs, call_stmt);
3793	  else
3794	    changed = defs_to_varying (stmt);
3795	}
3796      else
3797	changed = defs_to_varying (stmt);
3798    }
3799 done:
3800  return changed;
3801}
3802
3803/* Compare two operands by reverse postorder index */
3804
3805static int
3806compare_ops (const void *pa, const void *pb)
3807{
3808  const tree opa = *((const tree *)pa);
3809  const tree opb = *((const tree *)pb);
3810  gimple opstmta = SSA_NAME_DEF_STMT (opa);
3811  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3812  basic_block bba;
3813  basic_block bbb;
3814
3815  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3816    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3817  else if (gimple_nop_p (opstmta))
3818    return -1;
3819  else if (gimple_nop_p (opstmtb))
3820    return 1;
3821
3822  bba = gimple_bb (opstmta);
3823  bbb = gimple_bb (opstmtb);
3824
3825  if (!bba && !bbb)
3826    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3827  else if (!bba)
3828    return -1;
3829  else if (!bbb)
3830    return 1;
3831
3832  if (bba == bbb)
3833    {
3834      if (gimple_code (opstmta) == GIMPLE_PHI
3835	  && gimple_code (opstmtb) == GIMPLE_PHI)
3836	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3837      else if (gimple_code (opstmta) == GIMPLE_PHI)
3838	return -1;
3839      else if (gimple_code (opstmtb) == GIMPLE_PHI)
3840	return 1;
3841      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3842        return gimple_uid (opstmta) - gimple_uid (opstmtb);
3843      else
3844	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3845    }
3846  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3847}
3848
3849/* Sort an array containing members of a strongly connected component
3850   SCC so that the members are ordered by RPO number.
3851   This means that when the sort is complete, iterating through the
3852   array will give you the members in RPO order.  */
3853
3854static void
3855sort_scc (vec<tree> scc)
3856{
3857  scc.qsort (compare_ops);
3858}
3859
3860/* Insert the no longer used nary ONARY to the hash INFO.  */
3861
3862static void
3863copy_nary (vn_nary_op_t onary, vn_tables_t info)
3864{
3865  size_t size = sizeof_vn_nary_op (onary->length);
3866  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3867					       &info->nary_obstack);
3868  memcpy (nary, onary, size);
3869  vn_nary_op_insert_into (nary, info->nary, false);
3870}
3871
3872/* Insert the no longer used phi OPHI to the hash INFO.  */
3873
3874static void
3875copy_phi (vn_phi_t ophi, vn_tables_t info)
3876{
3877  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3878  vn_phi_s **slot;
3879  memcpy (phi, ophi, sizeof (*phi));
3880  ophi->phiargs.create (0);
3881  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3882  gcc_assert (!*slot);
3883  *slot = phi;
3884}
3885
3886/* Insert the no longer used reference OREF to the hash INFO.  */
3887
3888static void
3889copy_reference (vn_reference_t oref, vn_tables_t info)
3890{
3891  vn_reference_t ref;
3892  vn_reference_s **slot;
3893  ref = (vn_reference_t) pool_alloc (info->references_pool);
3894  memcpy (ref, oref, sizeof (*ref));
3895  oref->operands.create (0);
3896  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3897  if (*slot)
3898    free_reference (*slot);
3899  *slot = ref;
3900}
3901
3902/* Process a strongly connected component in the SSA graph.  */
3903
3904static void
3905process_scc (vec<tree> scc)
3906{
3907  tree var;
3908  unsigned int i;
3909  unsigned int iterations = 0;
3910  bool changed = true;
3911  vn_nary_op_iterator_type hin;
3912  vn_phi_iterator_type hip;
3913  vn_reference_iterator_type hir;
3914  vn_nary_op_t nary;
3915  vn_phi_t phi;
3916  vn_reference_t ref;
3917
3918  /* If the SCC has a single member, just visit it.  */
3919  if (scc.length () == 1)
3920    {
3921      tree use = scc[0];
3922      if (VN_INFO (use)->use_processed)
3923	return;
3924      /* We need to make sure it doesn't form a cycle itself, which can
3925	 happen for self-referential PHI nodes.  In that case we would
3926	 end up inserting an expression with VN_TOP operands into the
3927	 valid table which makes us derive bogus equivalences later.
3928	 The cheapest way to check this is to assume it for all PHI nodes.  */
3929      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3930	/* Fallthru to iteration.  */ ;
3931      else
3932	{
3933	  visit_use (use);
3934	  return;
3935	}
3936    }
3937
3938  if (dump_file && (dump_flags & TDF_DETAILS))
3939    print_scc (dump_file, scc);
3940
3941  /* Iterate over the SCC with the optimistic table until it stops
3942     changing.  */
3943  current_info = optimistic_info;
3944  while (changed)
3945    {
3946      changed = false;
3947      iterations++;
3948      if (dump_file && (dump_flags & TDF_DETAILS))
3949	fprintf (dump_file, "Starting iteration %d\n", iterations);
3950      /* As we are value-numbering optimistically we have to
3951	 clear the expression tables and the simplified expressions
3952	 in each iteration until we converge.  */
3953      optimistic_info->nary->empty ();
3954      optimistic_info->phis->empty ();
3955      optimistic_info->references->empty ();
3956      obstack_free (&optimistic_info->nary_obstack, NULL);
3957      gcc_obstack_init (&optimistic_info->nary_obstack);
3958      empty_alloc_pool (optimistic_info->phis_pool);
3959      empty_alloc_pool (optimistic_info->references_pool);
3960      FOR_EACH_VEC_ELT (scc, i, var)
3961	VN_INFO (var)->expr = NULL_TREE;
3962      FOR_EACH_VEC_ELT (scc, i, var)
3963	changed |= visit_use (var);
3964    }
3965
3966  if (dump_file && (dump_flags & TDF_DETAILS))
3967    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3968  statistics_histogram_event (cfun, "SCC iterations", iterations);
3969
3970  /* Finally, copy the contents of the no longer used optimistic
3971     table to the valid table.  */
3972  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3973    copy_nary (nary, valid_info);
3974  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3975    copy_phi (phi, valid_info);
3976  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3977			       ref, vn_reference_t, hir)
3978    copy_reference (ref, valid_info);
3979
3980  current_info = valid_info;
3981}
3982
3983
3984/* Pop the components of the found SCC for NAME off the SCC stack
3985   and process them.  Returns true if all went well, false if
3986   we run into resource limits.  */
3987
3988static bool
3989extract_and_process_scc_for_name (tree name)
3990{
3991  auto_vec<tree> scc;
3992  tree x;
3993
3994  /* Found an SCC, pop the components off the SCC stack and
3995     process them.  */
3996  do
3997    {
3998      x = sccstack.pop ();
3999
4000      VN_INFO (x)->on_sccstack = false;
4001      scc.safe_push (x);
4002    } while (x != name);
4003
4004  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
4005  if (scc.length ()
4006      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4007    {
4008      if (dump_file)
4009	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4010		 "SCC size %u exceeding %u\n", scc.length (),
4011		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4012
4013      return false;
4014    }
4015
4016  if (scc.length () > 1)
4017    sort_scc (scc);
4018
4019  process_scc (scc);
4020
4021  return true;
4022}
4023
4024/* Depth first search on NAME to discover and process SCC's in the SSA
4025   graph.
4026   Execution of this algorithm relies on the fact that the SCC's are
4027   popped off the stack in topological order.
4028   Returns true if successful, false if we stopped processing SCC's due
4029   to resource constraints.  */
4030
4031static bool
4032DFS (tree name)
4033{
4034  vec<ssa_op_iter> itervec = vNULL;
4035  vec<tree> namevec = vNULL;
4036  use_operand_p usep = NULL;
4037  gimple defstmt;
4038  tree use;
4039  ssa_op_iter iter;
4040
4041start_over:
4042  /* SCC info */
4043  VN_INFO (name)->dfsnum = next_dfs_num++;
4044  VN_INFO (name)->visited = true;
4045  VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4046
4047  sccstack.safe_push (name);
4048  VN_INFO (name)->on_sccstack = true;
4049  defstmt = SSA_NAME_DEF_STMT (name);
4050
4051  /* Recursively DFS on our operands, looking for SCC's.  */
4052  if (!gimple_nop_p (defstmt))
4053    {
4054      /* Push a new iterator.  */
4055      if (gphi *phi = dyn_cast <gphi *> (defstmt))
4056	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4057      else
4058	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4059    }
4060  else
4061    clear_and_done_ssa_iter (&iter);
4062
4063  while (1)
4064    {
4065      /* If we are done processing uses of a name, go up the stack
4066	 of iterators and process SCCs as we found them.  */
4067      if (op_iter_done (&iter))
4068	{
4069	  /* See if we found an SCC.  */
4070	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4071	    if (!extract_and_process_scc_for_name (name))
4072	      {
4073		namevec.release ();
4074		itervec.release ();
4075		return false;
4076	      }
4077
4078	  /* Check if we are done.  */
4079	  if (namevec.is_empty ())
4080	    {
4081	      namevec.release ();
4082	      itervec.release ();
4083	      return true;
4084	    }
4085
4086	  /* Restore the last use walker and continue walking there.  */
4087	  use = name;
4088	  name = namevec.pop ();
4089	  memcpy (&iter, &itervec.last (),
4090		  sizeof (ssa_op_iter));
4091	  itervec.pop ();
4092	  goto continue_walking;
4093	}
4094
4095      use = USE_FROM_PTR (usep);
4096
4097      /* Since we handle phi nodes, we will sometimes get
4098	 invariants in the use expression.  */
4099      if (TREE_CODE (use) == SSA_NAME)
4100	{
4101	  if (! (VN_INFO (use)->visited))
4102	    {
4103	      /* Recurse by pushing the current use walking state on
4104		 the stack and starting over.  */
4105	      itervec.safe_push (iter);
4106	      namevec.safe_push (name);
4107	      name = use;
4108	      goto start_over;
4109
4110continue_walking:
4111	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4112					 VN_INFO (use)->low);
4113	    }
4114	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4115	      && VN_INFO (use)->on_sccstack)
4116	    {
4117	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4118					 VN_INFO (name)->low);
4119	    }
4120	}
4121
4122      usep = op_iter_next_use (&iter);
4123    }
4124}
4125
4126/* Allocate a value number table.  */
4127
4128static void
4129allocate_vn_table (vn_tables_t table)
4130{
4131  table->phis = new vn_phi_table_type (23);
4132  table->nary = new vn_nary_op_table_type (23);
4133  table->references = new vn_reference_table_type (23);
4134
4135  gcc_obstack_init (&table->nary_obstack);
4136  table->phis_pool = create_alloc_pool ("VN phis",
4137					sizeof (struct vn_phi_s),
4138					30);
4139  table->references_pool = create_alloc_pool ("VN references",
4140					      sizeof (struct vn_reference_s),
4141					      30);
4142}
4143
4144/* Free a value number table.  */
4145
4146static void
4147free_vn_table (vn_tables_t table)
4148{
4149  delete table->phis;
4150  table->phis = NULL;
4151  delete table->nary;
4152  table->nary = NULL;
4153  delete table->references;
4154  table->references = NULL;
4155  obstack_free (&table->nary_obstack, NULL);
4156  free_alloc_pool (table->phis_pool);
4157  free_alloc_pool (table->references_pool);
4158}
4159
4160static void
4161init_scc_vn (void)
4162{
4163  size_t i;
4164  int j;
4165  int *rpo_numbers_temp;
4166
4167  calculate_dominance_info (CDI_DOMINATORS);
4168  sccstack.create (0);
4169  constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4170
4171  constant_value_ids = BITMAP_ALLOC (NULL);
4172
4173  next_dfs_num = 1;
4174  next_value_id = 1;
4175
4176  vn_ssa_aux_table.create (num_ssa_names + 1);
4177  /* VEC_alloc doesn't actually grow it to the right size, it just
4178     preallocates the space to do so.  */
4179  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4180  gcc_obstack_init (&vn_ssa_aux_obstack);
4181
4182  shared_lookup_phiargs.create (0);
4183  shared_lookup_references.create (0);
4184  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4185  rpo_numbers_temp =
4186    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4187  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4188
4189  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4190     the i'th block in RPO order is bb.  We want to map bb's to RPO
4191     numbers, so we need to rearrange this array.  */
4192  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4193    rpo_numbers[rpo_numbers_temp[j]] = j;
4194
4195  XDELETE (rpo_numbers_temp);
4196
4197  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4198
4199  /* Create the VN_INFO structures, and initialize value numbers to
4200     TOP.  */
4201  for (i = 0; i < num_ssa_names; i++)
4202    {
4203      tree name = ssa_name (i);
4204      if (name)
4205	{
4206	  VN_INFO_GET (name)->valnum = VN_TOP;
4207	  VN_INFO (name)->expr = NULL_TREE;
4208	  VN_INFO (name)->value_id = 0;
4209	}
4210    }
4211
4212  renumber_gimple_stmt_uids ();
4213
4214  /* Create the valid and optimistic value numbering tables.  */
4215  valid_info = XCNEW (struct vn_tables_s);
4216  allocate_vn_table (valid_info);
4217  optimistic_info = XCNEW (struct vn_tables_s);
4218  allocate_vn_table (optimistic_info);
4219}
4220
4221void
4222free_scc_vn (void)
4223{
4224  size_t i;
4225
4226  delete constant_to_value_id;
4227  constant_to_value_id = NULL;
4228  BITMAP_FREE (constant_value_ids);
4229  shared_lookup_phiargs.release ();
4230  shared_lookup_references.release ();
4231  XDELETEVEC (rpo_numbers);
4232
4233  for (i = 0; i < num_ssa_names; i++)
4234    {
4235      tree name = ssa_name (i);
4236      if (name
4237	  && VN_INFO (name)->needs_insertion)
4238	release_ssa_name (name);
4239    }
4240  obstack_free (&vn_ssa_aux_obstack, NULL);
4241  vn_ssa_aux_table.release ();
4242
4243  sccstack.release ();
4244  free_vn_table (valid_info);
4245  XDELETE (valid_info);
4246  free_vn_table (optimistic_info);
4247  XDELETE (optimistic_info);
4248}
4249
4250/* Set *ID according to RESULT.  */
4251
4252static void
4253set_value_id_for_result (tree result, unsigned int *id)
4254{
4255  if (result && TREE_CODE (result) == SSA_NAME)
4256    *id = VN_INFO (result)->value_id;
4257  else if (result && is_gimple_min_invariant (result))
4258    *id = get_or_alloc_constant_value_id (result);
4259  else
4260    *id = get_next_value_id ();
4261}
4262
4263/* Set the value ids in the valid hash tables.  */
4264
4265static void
4266set_hashtable_value_ids (void)
4267{
4268  vn_nary_op_iterator_type hin;
4269  vn_phi_iterator_type hip;
4270  vn_reference_iterator_type hir;
4271  vn_nary_op_t vno;
4272  vn_reference_t vr;
4273  vn_phi_t vp;
4274
4275  /* Now set the value ids of the things we had put in the hash
4276     table.  */
4277
4278  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4279    set_value_id_for_result (vno->result, &vno->value_id);
4280
4281  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4282    set_value_id_for_result (vp->result, &vp->value_id);
4283
4284  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4285			       hir)
4286    set_value_id_for_result (vr->result, &vr->value_id);
4287}
4288
4289class cond_dom_walker : public dom_walker
4290{
4291public:
4292  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4293
4294  virtual void before_dom_children (basic_block);
4295
4296  bool fail;
4297};
4298
4299void
4300cond_dom_walker::before_dom_children (basic_block bb)
4301{
4302  edge e;
4303  edge_iterator ei;
4304
4305  if (fail)
4306    return;
4307
4308  /* If any of the predecessor edges that do not come from blocks dominated
4309     by us are still marked as possibly executable consider this block
4310     reachable.  */
4311  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4312  FOR_EACH_EDGE (e, ei, bb->preds)
4313    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4314      reachable |= (e->flags & EDGE_EXECUTABLE);
4315
4316  /* If the block is not reachable all outgoing edges are not
4317     executable.  */
4318  if (!reachable)
4319    {
4320      if (dump_file && (dump_flags & TDF_DETAILS))
4321	fprintf (dump_file, "Marking all outgoing edges of unreachable "
4322		 "BB %d as not executable\n", bb->index);
4323
4324      FOR_EACH_EDGE (e, ei, bb->succs)
4325	e->flags &= ~EDGE_EXECUTABLE;
4326      return;
4327    }
4328
4329  gimple stmt = last_stmt (bb);
4330  if (!stmt)
4331    return;
4332
4333  enum gimple_code code = gimple_code (stmt);
4334  if (code != GIMPLE_COND
4335      && code != GIMPLE_SWITCH
4336      && code != GIMPLE_GOTO)
4337    return;
4338
4339  if (dump_file && (dump_flags & TDF_DETAILS))
4340    {
4341      fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4342	       bb->index);
4343      print_gimple_stmt (dump_file, stmt, 0, 0);
4344    }
4345
4346  /* Value-number the last stmts SSA uses.  */
4347  ssa_op_iter i;
4348  tree op;
4349  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4350    if (VN_INFO (op)->visited == false
4351	&& !DFS (op))
4352      {
4353	fail = true;
4354	return;
4355      }
4356
4357  /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4358     if value-numbering can prove they are not reachable.  Handling
4359     computed gotos is also possible.  */
4360  tree val;
4361  switch (code)
4362    {
4363    case GIMPLE_COND:
4364      {
4365	tree lhs = gimple_cond_lhs (stmt);
4366	tree rhs = gimple_cond_rhs (stmt);
4367	/* Work hard in computing the condition and take into account
4368	   the valueization of the defining stmt.  */
4369	if (TREE_CODE (lhs) == SSA_NAME)
4370	  lhs = vn_get_expr_for (lhs);
4371	if (TREE_CODE (rhs) == SSA_NAME)
4372	  rhs = vn_get_expr_for (rhs);
4373	val = fold_binary (gimple_cond_code (stmt),
4374			   boolean_type_node, lhs, rhs);
4375	break;
4376      }
4377    case GIMPLE_SWITCH:
4378      val = gimple_switch_index (as_a <gswitch *> (stmt));
4379      break;
4380    case GIMPLE_GOTO:
4381      val = gimple_goto_dest (stmt);
4382      break;
4383    default:
4384      gcc_unreachable ();
4385    }
4386  if (!val)
4387    return;
4388
4389  edge taken = find_taken_edge (bb, vn_valueize (val));
4390  if (!taken)
4391    return;
4392
4393  if (dump_file && (dump_flags & TDF_DETAILS))
4394    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4395	     "not executable\n", bb->index, bb->index, taken->dest->index);
4396
4397  FOR_EACH_EDGE (e, ei, bb->succs)
4398    if (e != taken)
4399      e->flags &= ~EDGE_EXECUTABLE;
4400}
4401
4402/* Do SCCVN.  Returns true if it finished, false if we bailed out
4403   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
4404   how we use the alias oracle walking during the VN process.  */
4405
4406bool
4407run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4408{
4409  basic_block bb;
4410  size_t i;
4411  tree param;
4412
4413  default_vn_walk_kind = default_vn_walk_kind_;
4414
4415  init_scc_vn ();
4416  current_info = valid_info;
4417
4418  for (param = DECL_ARGUMENTS (current_function_decl);
4419       param;
4420       param = DECL_CHAIN (param))
4421    {
4422      tree def = ssa_default_def (cfun, param);
4423      if (def)
4424	{
4425	  VN_INFO (def)->visited = true;
4426	  VN_INFO (def)->valnum = def;
4427	}
4428    }
4429
4430  /* Mark all edges as possibly executable.  */
4431  FOR_ALL_BB_FN (bb, cfun)
4432    {
4433      edge_iterator ei;
4434      edge e;
4435      FOR_EACH_EDGE (e, ei, bb->succs)
4436	e->flags |= EDGE_EXECUTABLE;
4437    }
4438
4439  /* Walk all blocks in dominator order, value-numbering the last stmts
4440     SSA uses and decide whether outgoing edges are not executable.  */
4441  cond_dom_walker walker;
4442  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4443  if (walker.fail)
4444    {
4445      free_scc_vn ();
4446      return false;
4447    }
4448
4449  /* Value-number remaining SSA names.  */
4450  for (i = 1; i < num_ssa_names; ++i)
4451    {
4452      tree name = ssa_name (i);
4453      if (name
4454	  && VN_INFO (name)->visited == false
4455	  && !has_zero_uses (name))
4456	if (!DFS (name))
4457	  {
4458	    free_scc_vn ();
4459	    return false;
4460	  }
4461    }
4462
4463  /* Initialize the value ids.  */
4464
4465  for (i = 1; i < num_ssa_names; ++i)
4466    {
4467      tree name = ssa_name (i);
4468      vn_ssa_aux_t info;
4469      if (!name)
4470	continue;
4471      info = VN_INFO (name);
4472      if (info->valnum == name
4473	  || info->valnum == VN_TOP)
4474	info->value_id = get_next_value_id ();
4475      else if (is_gimple_min_invariant (info->valnum))
4476	info->value_id = get_or_alloc_constant_value_id (info->valnum);
4477    }
4478
4479  /* Propagate.  */
4480  for (i = 1; i < num_ssa_names; ++i)
4481    {
4482      tree name = ssa_name (i);
4483      vn_ssa_aux_t info;
4484      if (!name)
4485	continue;
4486      info = VN_INFO (name);
4487      if (TREE_CODE (info->valnum) == SSA_NAME
4488	  && info->valnum != name
4489	  && info->value_id != VN_INFO (info->valnum)->value_id)
4490	info->value_id = VN_INFO (info->valnum)->value_id;
4491    }
4492
4493  set_hashtable_value_ids ();
4494
4495  if (dump_file && (dump_flags & TDF_DETAILS))
4496    {
4497      fprintf (dump_file, "Value numbers:\n");
4498      for (i = 0; i < num_ssa_names; i++)
4499	{
4500	  tree name = ssa_name (i);
4501	  if (name
4502	      && VN_INFO (name)->visited
4503	      && SSA_VAL (name) != name)
4504	    {
4505	      print_generic_expr (dump_file, name, 0);
4506	      fprintf (dump_file, " = ");
4507	      print_generic_expr (dump_file, SSA_VAL (name), 0);
4508	      fprintf (dump_file, "\n");
4509	    }
4510	}
4511    }
4512
4513  return true;
4514}
4515
4516/* Return the maximum value id we have ever seen.  */
4517
4518unsigned int
4519get_max_value_id (void)
4520{
4521  return next_value_id;
4522}
4523
4524/* Return the next unique value id.  */
4525
4526unsigned int
4527get_next_value_id (void)
4528{
4529  return next_value_id++;
4530}
4531
4532
4533/* Compare two expressions E1 and E2 and return true if they are equal.  */
4534
4535bool
4536expressions_equal_p (tree e1, tree e2)
4537{
4538  /* The obvious case.  */
4539  if (e1 == e2)
4540    return true;
4541
4542  /* If only one of them is null, they cannot be equal.  */
4543  if (!e1 || !e2)
4544    return false;
4545
4546  /* Now perform the actual comparison.  */
4547  if (TREE_CODE (e1) == TREE_CODE (e2)
4548      && operand_equal_p (e1, e2, OEP_PURE_SAME))
4549    return true;
4550
4551  return false;
4552}
4553
4554
4555/* Return true if the nary operation NARY may trap.  This is a copy
4556   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
4557
4558bool
4559vn_nary_may_trap (vn_nary_op_t nary)
4560{
4561  tree type;
4562  tree rhs2 = NULL_TREE;
4563  bool honor_nans = false;
4564  bool honor_snans = false;
4565  bool fp_operation = false;
4566  bool honor_trapv = false;
4567  bool handled, ret;
4568  unsigned i;
4569
4570  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4571      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4572      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4573    {
4574      type = nary->type;
4575      fp_operation = FLOAT_TYPE_P (type);
4576      if (fp_operation)
4577	{
4578	  honor_nans = flag_trapping_math && !flag_finite_math_only;
4579	  honor_snans = flag_signaling_nans != 0;
4580	}
4581      else if (INTEGRAL_TYPE_P (type)
4582	       && TYPE_OVERFLOW_TRAPS (type))
4583	honor_trapv = true;
4584    }
4585  if (nary->length >= 2)
4586    rhs2 = nary->op[1];
4587  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4588				       honor_trapv,
4589				       honor_nans, honor_snans, rhs2,
4590				       &handled);
4591  if (handled
4592      && ret)
4593    return true;
4594
4595  for (i = 0; i < nary->length; ++i)
4596    if (tree_could_trap_p (nary->op[i]))
4597      return true;
4598
4599  return false;
4600}
4601