/* Alias analysis for trees.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "tm_p.h"
#include "target.h"
#include "predict.h"

#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "langhooks.h"
#include "flags.h"
#include "tree-pretty-print.h"
#include "dumpfile.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "params.h"
#include "alloc-pool.h"
#include "bitmap.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-reference.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
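
/* As a quick orientation, a hypothetical client pass that wants to know
   whether a store statement STMT can invalidate the memory read through
   an expression EXPR would query the oracle roughly like this (a sketch
   for illustration, not code used by this file):

     ao_ref r;
     ao_ref_init (&r, expr);
     if (!stmt_may_clobber_ref_p_1 (stmt, &r))
       ... EXPR is known to be unaffected by STMT ...

   The ao_ref variants avoid re-decomposing the reference tree on every
   query; the plain tree overloads build the ao_ref internally.  */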


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same pointer.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
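
/* For example, given ptr_derefs_may_alias_p (&a, q_2) for a decl a, the
   ADDR_EXPR case above reduces the query to
   ptr_deref_may_alias_decl_p (q_2, a); if a is not may_be_aliased or the
   points-to set of q_2 does not contain a, the dereferences are known
   not to alias.  */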

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns whether a reference with base BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}
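
/* A typical use of the lazy decomposition above (a sketch; REF_TREE is a
   placeholder name):

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     tree base = ao_ref_base (&r);

   after which r.offset, r.size and r.max_size are filled in, in bits,
   with a max_size of -1 meaning the extent of the access is unknown.
   Repeated calls to ao_ref_base return the cached base without
   re-walking the reference tree.  */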

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   at or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
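
/* This is the helper the builtin-call handling below uses to model, for
   example, the source of a memcpy; a sketch of that pattern (SRC and LEN
   stand in for the respective gimple call arguments):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, src, len);
     if (!refs_may_alias_p_1 (&dref, ref, false))
       ... the call cannot read *REF through its source argument ...  */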

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
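
/* Callers must treat the -1 answer conservatively: the usual patterns
   below are same_type_for_tbaa (t1, t2) == 1 to require provably
   compatible types and == 0 to require provably distinct ones, so a -1
   result enables neither form of disambiguation.  */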

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}

/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */
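
/* The function below collects the FIELD_DECLs on each access path whose
   containing type is a RECORD_TYPE, sorts both lists by the TYPE_UID of
   that containing type, and intersects them like a sorted-list merge.
   E.g. with

     struct A { int i; int j; };
     struct B { struct A a; int k; };

   the paths p->a.i and q->a.j each contain a field of struct A; the
   merge pairs those two fields up and, since i != j and neither is a
   bitfield, the accesses cannot overlap for any pair of objects.  */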

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	{
	  const_tree tem = fieldsx[0];
	  fieldsx[0] = fieldsx[1];
	  fieldsx[1] = tem;
	}
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	{
	  const_tree tem = fieldsy[0];
	  fieldsy[0] = fieldsy[1];
	  fieldsy[1] = tem;
	}
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */
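
/* For example, with

     struct { int i; int j; } s;

   an access to s.i decomposes to [offset1, offset1 + max_size1) = [0, 32)
   and an access to s.j to [32, 64), in bits, on a target with 32-bit int,
   so ranges_overlap_p is false and the references are disambiguated even
   though they share the base s.  */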

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If the references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();
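
  /* For instance, a base1 of MEM[ptr1 + -4B] yields a negative moff of
     -4 * BITS_PER_UNIT; instead of making offset1p negative we add that
     amount to offset2p, which keeps both offsets non-negative for the
     range test below.  */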

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ???  We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ???  We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ???  This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset2 += (-moff).to_short_addr ();
      else
	offset1 += moff.to_short_addr ();
      moff = mem_ref_offset (base2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset1 += (-moff).to_short_addr ();
      else
	offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
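
/* Note both dependence predicates pass tbaa_p = false to
   refs_may_alias_p_1: a store may change the dynamic type of the memory
   it writes, so type-based rules are not valid for disambiguating anti-
   and output-dependences.  A hypothetical client:

     if (!refs_anti_dependent_p (load_tree, store_tree))
       ... the store can be moved before the load ...  */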

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not free of side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;
1552
1553  /* If the reference is based on a decl that is not aliased the call
1554     cannot possibly use it.  */
1555  if (DECL_P (base)
1556      && !may_be_aliased (base)
1557      /* But local statics can be used through recursion.  */
1558      && !is_global_var (base))
1559    goto process_args;
1560
1561  callee = gimple_call_fndecl (call);
1562
1563  /* Handle those builtin functions explicitly that do not act as
1564     escape points.  See tree-ssa-structalias.c:find_func_aliases
1565     for the list of builtins we might need to handle here.  */
1566  if (callee != NULL_TREE
1567      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1568    switch (DECL_FUNCTION_CODE (callee))
1569      {
1570	/* All the following functions read memory pointed to by
1571	   their second argument.  strcat/strncat additionally
1572	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
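	/* E.g. memset (p_1, 0, n_2) only stores to *p_1 and
	   frexp (x_3, e_4) only stores to *e_4, so neither can
	   read from REF.  */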
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}

/* Wrapper around ref_maybe_used_by_call_p_1 that keeps the
   disambiguation statistics.  */

static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}


/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All assignments that may reference memory are single (have
	 exactly one rhs operand).  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

/* Overload of the above that takes the reference as a tree.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r);
}
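
/* A minimal usage sketch with made-up names: a pass that wants to
   move a store to EXPR across STMT could combine the use oracle with
   the clobber oracle (stmt_may_clobber_ref_p is defined below):

     if (!ref_maybe_used_by_stmt_p (stmt, expr)
	 && !stmt_may_clobber_ref_p (stmt, expr))
       move_store_across (stmt, expr);

   where move_store_across is a hypothetical helper that would also
   take care of updating the virtual operands.  */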

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
	/* Treat these internal calls like ECF_PURE for aliasing,
	   they don't write to any memory the program should care about.
	   They have important other side-effects, and read memory,
	   so can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_ASAN_CHECK:
	return false;
      default:
	break;
      }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
         or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass a size for strncat: the store may cover up
	       to strlen (dest) + n + 1 bytes from dest (n + 1 bytes
	       starting at dest + strlen (dest)), and strlen (dest)
	       isn't known here.  E.g. strncat (dst, src, 3) may write
	       up to strlen (dst) + 4 bytes of dst.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Likewise don't pass a size for __strncat_chk: the
	       maximum size is strlen (dest) + n + 1 rather than n,
	       and strlen (dest) isn't known here.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* Realloc serves both as allocation point and deallocation point.  */
	case BUILT_IN_REALLOC:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    /* Unix98 specifies that errno is set on allocation failure.  */
	    return ((flag_errno_math
		     && targetm.ref_may_alias_errno (ref))
		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      /* errno may be written as well; punt to the generic
		 call handling below.  */
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, DECL_UID (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
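
/* For example p_1 = __builtin_malloc (n_2) is never considered to
   clobber an existing reference (apart from errno), while a call
   foo (&a) clobbers A only if A is in the call-clobber set computed
   by points-to analysis.  */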

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  Wrapper of
   call_may_clobber_ref_p_1 that also keeps the disambiguation
   statistics.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}


/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, true))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, true);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

/* Wrapper of stmt_may_clobber_ref_p_1 that takes the reference as
   a tree.  */

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}

/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if LHS equals the base candidate of the
	     access.  */
	  if (operand_equal_p (lhs, base, 0))
	    return true;
	}
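      /* For example a store to a.b kills an access a.b.c.d: dropping
	 the handled components .d and .c from the access yields a.b,
	 which compares operand_equal_p to the LHS.  */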

      /* Now look for non-literal equal bases, restricted to the case
	 of constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (ref->max_size == -1)
	return false;
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  offset_int off1 = mem_ref_offset (base);
		  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
		  off1 += offset;
		  offset_int off2 = mem_ref_offset (ref->base);
		  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
		  off2 += ref_offset;
		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }
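  /* The check above is plain containment: the store covers the bit
     range [offset, offset + size) and kills the access if that
     contains [ref_offset, ref_offset + ref->max_size); e.g. an 8-byte
     store at byte 0 of A kills a 4-byte access at byte 4 of A.  */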

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (ref->max_size == -1)
		return false;
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      offset_int offset = dref.offset;
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  /* Compare pointers.  */
		  offset += wi::lshift (mem_ref_offset (base),
					LOG2_BITS_PER_UNIT);
		  roffset += wi::lshift (mem_ref_offset (rbase),
					 LOG2_BITS_PER_UNIT);
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && wi::les_p (offset, roffset)
		  && wi::les_p (roffset + ref->max_size,
				offset + wi::lshift (wi::to_offset (len),
						     LOG2_BITS_PER_UNIT)))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}
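
/* For example, by the cases above, memset (&a, 0, sizeof (a)) kills
   any access to A it fully covers and free (p_1) kills the
   dereference *p_1.  */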

/* Overload of stmt_kills_ref_p that takes the reference as a tree.  */

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}


/* Walk the virtual use-def chain of VUSE until hitting the virtual
   operand TARGET; if a statement clobbering the memory reference REF
   is hit first, false is returned.  The walk starts with VUSE, one
   argument of PHI.  */
2462
2463static bool
2464maybe_skip_until (gimple phi, tree target, ao_ref *ref,
2465		  tree vuse, unsigned int *cnt, bitmap *visited,
2466		  bool abort_on_visited,
2467		  void *(*translate)(ao_ref *, tree, void *, bool),
2468		  void *data)
2469{
2470  basic_block bb = gimple_bb (phi);
2471
2472  if (!*visited)
2473    *visited = BITMAP_ALLOC (NULL);
2474
2475  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2476
2477  /* Walk until we hit the target.  */
2478  while (vuse != target)
2479    {
2480      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
2481      /* Recurse for PHI nodes.  */
2482      if (gimple_code (def_stmt) == GIMPLE_PHI)
2483	{
2484	  /* An already visited PHI node ends the walk successfully.  */
2485	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2486	    return !abort_on_visited;
2487	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2488					   visited, abort_on_visited,
2489					   translate, data);
2490	  if (!vuse)
2491	    return false;
2492	  continue;
2493	}
2494      else if (gimple_nop_p (def_stmt))
2495	return false;
2496      else
2497	{
	  /* A clobbering statement or the end of the IL ends the walk
	     unsuccessfully.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (translate
		  && (*translate) (ref, vuse, data, true) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited,
			    void *(*translate)(ao_ref *, tree, void *, bool),
			    void *data)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited, translate, data))
	return arg0;
    }
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited, translate, data))
	return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...    #MEM_2 = vuse(MEM_1)
	   goto L3
       L2: store2 = ...    #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if ((!stmt_may_clobber_ref_p_1 (def0, ref)
	   || (translate
	       && (*translate) (ref, arg0, data, true) == NULL))
	  && (!stmt_may_clobber_ref_p_1 (def1, ref)
	      || (translate
		  && (*translate) (ref, arg1, data, true) == NULL)))
	return common_vuse;
    }

  return NULL_TREE;
}


/* Starting from a PHI node for the virtual operand of the memory
   reference REF find a continuation virtual operand that allows us to
   continue walking statements dominating PHI, skipping only statements
   that cannot possibly clobber REF.  Increments *CNT for each alias
   disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */
2591
2592tree
2593get_continuation_for_phi (gimple phi, ao_ref *ref,
2594			  unsigned int *cnt, bitmap *visited,
2595			  bool abort_on_visited,
2596			  void *(*translate)(ao_ref *, tree, void *, bool),
2597			  void *data)
2598{
2599  unsigned nargs = gimple_phi_num_args (phi);
2600
  /* We can simply look through a single-argument PHI.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand whose definition
	 dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
	for (i = 1; i < nargs; ++i)
	  {
	    arg1 = PHI_ARG_DEF (phi, i);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
	      {
		arg0 = arg1;
		break;
	      }
	    if (dominated_by_p (CDI_DOMINATORS,
				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
	      arg0 = arg1;
	  }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
	{
	  arg1 = PHI_ARG_DEF (phi, i);
	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
					     cnt, visited, abort_on_visited,
					     translate, data);
	  if (!arg0)
	    return NULL_TREE;
	}

      return arg0;
    }

  return NULL_TREE;
}
2646
2647/* Based on the memory reference REF and its virtual use VUSE call
2648   WALKER for each virtual use that is equivalent to VUSE, including VUSE
2649   itself.  That is, for each virtual use for which its defining statement
2650   does not clobber REF.
2651
2652   WALKER is called with REF, the current virtual use and DATA.  If
2653   WALKER returns non-NULL the walk stops and its result is returned.
2654   At the end of a non-successful walk NULL is returned.
2655
2656   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2657   use which definition is a statement that may clobber REF and DATA.
2658   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2659   If TRANSLATE returns non-NULL the walk stops and its result is returned.
2660   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2661   to adjust REF and *DATA to make that valid.
2662
2663   VALUEIZE if non-NULL is called with the next VUSE that is considered
2664   and return value is substituted for that.  This can be used to
2665   implement optimistic value-numbering for example.  Note that the
2666   VUSE argument is assumed to be valueized already.
2667
2668   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *, bool),
			tree (*valueize)(tree),
			void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	vuse = valueize (vuse);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated, translate, data);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data, false);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
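
/* A minimal usage sketch with made-up names and an arbitrary limit of
   128 disambiguations; a client looking for the statement providing
   the current value of REF could use

     static void *
     find_def_stmt (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
     {
       if (cnt > 128)
	 return (void *)-1;
       gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
       if (stmt_kills_ref_p (def_stmt, ref))
	 return def_stmt;
       return NULL;
     }

   called as walk_non_aliased_vuses (ref, vuse, find_def_stmt, NULL,
   NULL, NULL).  Real clients like tree-ssa-sccvn.c additionally
   translate REF through clobbering statements.  */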


/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points); the
   walk is considered successful if any of the forked walks was.

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
					 walker, data, visited, 0,
					 function_entry_reached);
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

/* Entry point for the walk over aliased vdefs; see the documentation
   of walk_aliased_vdefs_1 above for the meaning of the arguments and
   the return value.  */

unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
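
/* Likewise a sketch for the vdef walk, with a made-up callback that
   counts the may-clobbering definitions of REF reachable from VDEF:

     static bool
     count_clobbers (ao_ref *ref, tree vdef, void *data)
     {
       ++*(unsigned *)data;
       return false;
     }

     unsigned n = 0;
     walk_aliased_vdefs (&r, vdef, count_clobbers, &n, NULL, NULL);

   Returning true from the callback would instead stop the walk at
   the first clobbering definition.  */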