1/* Analysis of polymorphic call context.
2   Copyright (C) 2013-2015 Free Software Foundation, Inc.
3   Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "hash-set.h"
26#include "machmode.h"
27#include "vec.h"
28#include "double-int.h"
29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "wide-int.h"
33#include "inchash.h"
34#include "tree.h"
35#include "fold-const.h"
36#include "print-tree.h"
37#include "calls.h"
38#include "hashtab.h"
39#include "hard-reg-set.h"
40#include "function.h"
41#include "rtl.h"
42#include "flags.h"
43#include "statistics.h"
44#include "real.h"
45#include "fixed-value.h"
46#include "insn-config.h"
47#include "expmed.h"
48#include "dojump.h"
49#include "explow.h"
50#include "emit-rtl.h"
51#include "varasm.h"
52#include "stmt.h"
53#include "expr.h"
54#include "tree-pass.h"
55#include "target.h"
56#include "tree-pretty-print.h"
57#include "predict.h"
58#include "basic-block.h"
59#include "hash-map.h"
60#include "is-a.h"
61#include "plugin-api.h"
62#include "ipa-ref.h"
63#include "cgraph.h"
64#include "ipa-utils.h"
65#include "tree-ssa-alias.h"
66#include "internal-fn.h"
67#include "gimple-fold.h"
68#include "gimple-expr.h"
69#include "gimple.h"
70#include "alloc-pool.h"
71#include "symbol-summary.h"
72#include "ipa-prop.h"
73#include "ipa-inline.h"
74#include "diagnostic.h"
75#include "tree-dfa.h"
76#include "demangle.h"
77#include "dbgcnt.h"
78#include "gimple-pretty-print.h"
79#include "stor-layout.h"
80#include "intl.h"
81#include "data-streamer.h"
82#include "lto-streamer.h"
83#include "streamer-hooks.h"
84#include "tree-ssa-operands.h"
85#include "tree-into-ssa.h"
86
/* Return true when TYPE contains a polymorphic type and thus is interesting
   for the devirtualization machinery.  */
89
90static bool contains_type_p (tree, HOST_WIDE_INT, tree,
91			     bool consider_placement_new = true,
92			     bool consider_bases = true);
93
94bool
95contains_polymorphic_type_p (const_tree type)
96{
97  type = TYPE_MAIN_VARIANT (type);
98
99  if (RECORD_OR_UNION_TYPE_P (type))
100    {
101      if (TYPE_BINFO (type)
102          && polymorphic_type_binfo_p (TYPE_BINFO (type)))
103	return true;
104      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
105	if (TREE_CODE (fld) == FIELD_DECL
106	    && !DECL_ARTIFICIAL (fld)
107	    && contains_polymorphic_type_p (TREE_TYPE (fld)))
108	  return true;
109      return false;
110    }
111  if (TREE_CODE (type) == ARRAY_TYPE)
112    return contains_polymorphic_type_p (TREE_TYPE (type));
113  return false;
114}
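
/* As an illustration of the predicate above (a hedged sketch, not code taken
   from GCC or its testsuite):

     struct Poly { virtual ~Poly () {} };	// polymorphic
     struct Wrap { int i; Poly p; };		// has a polymorphic field
     typedef Wrap WrapArray[4];			// arrays propagate the property
     struct Plain { int i; double d; };		// nothing polymorphic inside

   Poly, Wrap and WrapArray would all be reported as containing a polymorphic
   type, while Plain would not.  */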
115
/* Return true if it seems valid to use placement new to build EXPECTED_TYPE
   at position CUR_OFFSET within TYPE.

   A POD can be changed to an instance of a polymorphic type by
   placement new.  Here we play it safe and assume that any
   non-polymorphic type is POD.  */
122bool
123possible_placement_new (tree type, tree expected_type,
124			HOST_WIDE_INT cur_offset)
125{
126  if (cur_offset < 0)
127    return true;
128  return ((TREE_CODE (type) != RECORD_TYPE
129	   || !TYPE_BINFO (type)
130	   || cur_offset >= POINTER_SIZE
131	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
132	  && (!TYPE_SIZE (type)
133	      || !tree_fits_shwi_p (TYPE_SIZE (type))
134	      || (cur_offset
135		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
136		     : POINTER_SIZE)
137		  <= tree_to_uhwi (TYPE_SIZE (type)))));
138}
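
/* For example (an illustrative C++ sketch; the type names are made up),
   possible_placement_new is meant to keep contexts like this one alive:

     #include <new>

     struct Poly { virtual void f () {} };
     struct Buffer { char raw[sizeof (Poly)]; };   // non-polymorphic, so POD

     void
     build (Buffer *b)
     {
       new (b->raw) Poly;	// the dynamic type inside *b is now Poly
     }

   so a query for Poly at offset 0 of Buffer cannot be rejected outright.  */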
139
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that matches
   OTR_TYPE or contains OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find the outermost polymorphic type with
   a virtual table present at position OFFSET.

   For example when THIS represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we cannot find the corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to 0.
   Return true when the lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of a new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */
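
/* To make the multiple inheritance remark above concrete (an illustrative
   sketch only; exact offsets depend on the C++ ABI):

     class C { public: virtual void g () {} };
     class B { public: int b; virtual void f () {} };
     class A : public C, public B { public: int a; };

   B is then a non-primary base of A placed at a non-zero offset.  Because a
   base, unlike a field, is represented by a DECL_ARTIFICIAL FIELD_DECL, the
   walk below keeps A as the outer type at that offset instead of narrowing
   the context down to B.  */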
169
170bool
171ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
172						       bool consider_placement_new,
173						       bool consider_bases)
174{
175  tree type = outer_type;
176  HOST_WIDE_INT cur_offset = offset;
177  bool speculative = false;
178  bool size_unknown = false;
179  unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;
180
181  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
182  if (!outer_type)
183    {
184      clear_outer_type (otr_type);
185      type = otr_type;
186      cur_offset = 0;
187    }
 /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
    that the context is either invalid, or the instance type must be
    derived from OUTER_TYPE.

    Because the instance type may contain a field whose type is OUTER_TYPE,
    we cannot derive any effective information about it.

    TODO: In the case we know all derived types, we can definitely do better
    here.  */
197  else if (TYPE_SIZE (outer_type)
198	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
199	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
200	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
201   {
202     bool der = maybe_derived_type; /* clear_outer_type will reset it.  */
203     bool dyn = dynamic;
204     clear_outer_type (otr_type);
205     type = otr_type;
206     cur_offset = 0;
207
     /* If a derived type is not allowed, we know that the context is invalid.
	For dynamic types, we really do not have information about the
	size of the memory location.  It is possible that a completely
	different type is stored after outer_type.  */
212     if (!der && !dyn)
213       {
214	 clear_speculation ();
215	 invalid = true;
216	 return false;
217       }
218   }
219
220  if (otr_type && TYPE_SIZE (otr_type)
221      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
222    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));
223
224  if (!type || offset < 0)
225    goto no_useful_type_info;
226
227  /* Find the sub-object the constant actually refers to and mark whether it is
228     an artificial one (as opposed to a user-defined one).
229
230     This loop is performed twice; first time for outer_type and second time
231     for speculative_outer_type.  The second run has SPECULATIVE set.  */
232  while (true)
233    {
234      unsigned HOST_WIDE_INT pos, size;
235      tree fld;
236
      /* If we do not know the size of TYPE, we need to be more conservative
         about accepting cases where we cannot find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 The size_unknown case should be very rare.  */
241      if (TYPE_SIZE (type)
242	  && tree_fits_shwi_p (TYPE_SIZE (type))
243	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
244	size_unknown = false;
245      else
246	size_unknown = true;
247
248      /* On a match, just return what we found.  */
249      if ((otr_type
250	   && types_odr_comparable (type, otr_type)
251	   && types_same_for_odr (type, otr_type))
252	  || (!otr_type
253	      && TREE_CODE (type) == RECORD_TYPE
254	      && TYPE_BINFO (type)
255	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
256	{
257	  if (speculative)
258	    {
259	      /* If we did not match the offset, just give up on speculation.  */
260	      if (cur_offset != 0
261		  /* Also check if speculation did not end up being same as
262		     non-speculation.  */
263		  || (types_must_be_same_for_odr (speculative_outer_type,
264						  outer_type)
265		      && (maybe_derived_type
266			  == speculative_maybe_derived_type)))
267		clear_speculation ();
268	      return true;
269	    }
270	  else
271	    {
272	      /* If type is known to be final, do not worry about derived
273		 types.  Testing it here may help us to avoid speculation.  */
274	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
275		  && (!in_lto_p || odr_type_p (outer_type))
		  && type_known_to_have_no_derivations_p (outer_type))
277		maybe_derived_type = false;
278
	      /* A type cannot contain itself at a non-zero offset.  In that case
		 just give up.  Still accept the case where the size is not known:
		 either the second copy may appear past the end of the type or
		 within the non-POD buffer located inside the variably sized type
		 itself.  */
284	      if (cur_offset != 0)
285		goto no_useful_type_info;
	      /* If we determined the type precisely or we have no clue on
		 speculation, we are done.  */
288	      if (!maybe_derived_type || !speculative_outer_type
289		  || !speculation_consistent_p (speculative_outer_type,
290					        speculative_offset,
291					        speculative_maybe_derived_type,
292						otr_type))
293		{
294		  clear_speculation ();
295	          return true;
296		}
297	      /* Otherwise look into speculation now.  */
298	      else
299		{
300		  speculative = true;
301		  type = speculative_outer_type;
302		  cur_offset = speculative_offset;
303		  continue;
304		}
305	    }
306	}
307
      /* Walk the fields and find the one corresponding to OFFSET.  */
309      if (TREE_CODE (type) == RECORD_TYPE)
310	{
311	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
312	    {
313	      if (TREE_CODE (fld) != FIELD_DECL)
314		continue;
315
316	      pos = int_bit_position (fld);
317	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
318		continue;
319
320	      /* Do not consider vptr itself.  Not even for placement new.  */
321	      if (!pos && DECL_ARTIFICIAL (fld)
322		  && POINTER_TYPE_P (TREE_TYPE (fld))
323		  && TYPE_BINFO (type)
324		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
325		continue;
326
327	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
328		goto no_useful_type_info;
329	      size = tree_to_uhwi (DECL_SIZE (fld));
330
	      /* We can always skip types smaller than pointer size:
		 those cannot contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than its type's size, so this needs
		 to be done with care.  */
339
340	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
341		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
342				     + POINTER_SIZE
343		  && (!otr_type
344		      || !TYPE_SIZE (TREE_TYPE (fld))
345		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
346		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
347			  >= cur_offset + otr_type_size))
348		break;
349	    }
350
351	  if (!fld)
352	    goto no_useful_type_info;
353
354	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
355	  cur_offset -= pos;
356	  /* DECL_ARTIFICIAL represents a basetype.  */
357	  if (!DECL_ARTIFICIAL (fld))
358	    {
359	      if (!speculative)
360		{
361		  outer_type = type;
362		  offset = cur_offset;
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
365		  maybe_derived_type = false;
366		}
367	      else
368		{
369		  speculative_outer_type = type;
370		  speculative_offset = cur_offset;
371		  speculative_maybe_derived_type = false;
372		}
373	    }
374	  else if (!consider_bases)
375	    goto no_useful_type_info;
376	}
377      else if (TREE_CODE (type) == ARRAY_TYPE)
378	{
379	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
380
	  /* Give up if we don't know the array element size.
	     Also give up on non-polymorphic types as they may be used
	     as buffers for placement new.  */
384	  if (!TYPE_SIZE (subtype)
385	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
386	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
387	      || !contains_polymorphic_type_p (subtype))
388	    goto no_useful_type_info;
389
390	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
391
	  /* We may see a buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
394	  if (TYPE_SIZE (subtype)
395	      && (cur_offset + otr_type_size
396		  > tree_to_uhwi (TYPE_SIZE (subtype))))
397	    goto no_useful_type_info;
398
399	  cur_offset = new_offset;
400	  type = subtype;
401	  if (!speculative)
402	    {
403	      outer_type = type;
404	      offset = cur_offset;
405	      maybe_derived_type = false;
406	    }
407	  else
408	    {
409	      speculative_outer_type = type;
410	      speculative_offset = cur_offset;
411	      speculative_maybe_derived_type = false;
412	    }
413	}
414      /* Give up on anything else.  */
415      else
416	{
417no_useful_type_info:
418	  if (maybe_derived_type && !speculative
419	      && TREE_CODE (outer_type) == RECORD_TYPE
420	      && TREE_CODE (otr_type) == RECORD_TYPE
421	      && TYPE_BINFO (otr_type)
422	      && !offset
423	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
424	    {
425	      clear_outer_type (otr_type);
426	      if (!speculative_outer_type
427		  || !speculation_consistent_p (speculative_outer_type,
428						speculative_offset,
429					        speculative_maybe_derived_type,
430						otr_type))
431		clear_speculation ();
432	      if (speculative_outer_type)
433		{
434		  speculative = true;
435		  type = speculative_outer_type;
436		  cur_offset = speculative_offset;
437		}
438	      else
439		return true;
440	    }
	  /* We found no way to embed EXPECTED_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variably sized types containing themselves.  */
444	  if (!speculative
445	      && consider_placement_new
446	      && (size_unknown || !type || maybe_derived_type
447		  || possible_placement_new (type, otr_type, cur_offset)))
448	    {
	      /* In these weird cases we want to accept the context.
		 In the non-speculative run we have no useful outer_type info
		 (TODO: we may eventually want to record an upper bound on the
		  type size that can be used to prune the walk),
		 but we still want to consider speculation that may
		 give useful info.  */
455	      if (!speculative)
456		{
457		  clear_outer_type (otr_type);
458		  if (!speculative_outer_type
459		      || !speculation_consistent_p (speculative_outer_type,
460						    speculative_offset,
461						    speculative_maybe_derived_type,
462						    otr_type))
463		    clear_speculation ();
464		  if (speculative_outer_type)
465		    {
466		      speculative = true;
467		      type = speculative_outer_type;
468		      cur_offset = speculative_offset;
469		    }
470		  else
471		    return true;
472		}
473	      else
474		{
475		  clear_speculation ();
476	          return true;
477		}
478	    }
479	  else
480	    {
481	      clear_speculation ();
482	      if (speculative)
483		return true;
484	      clear_outer_type (otr_type);
485	      invalid = true;
486	      return false;
487	    }
488	}
489    }
490}
491
/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
   CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
   be built within OUTER_TYPE by means of placement new.  CONSIDER_BASES makes
   the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or
   as a base of one of the fields of OUTER_TYPE.  */
497
498static bool
499contains_type_p (tree outer_type, HOST_WIDE_INT offset,
500		 tree otr_type,
501		 bool consider_placement_new,
502		 bool consider_bases)
503{
504  ipa_polymorphic_call_context context;
505
506  /* Check that type is within range.  */
507  if (offset < 0)
508    return false;
509  if (TYPE_SIZE (outer_type) && TYPE_SIZE (otr_type)
510      && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
511      && TREE_CODE (TYPE_SIZE (otr_type)) == INTEGER_CST
512      && wi::ltu_p (wi::to_offset (TYPE_SIZE (outer_type)),
513		    (wi::to_offset (TYPE_SIZE (otr_type)) + offset)))
514    return false;
515
516  context.offset = offset;
517  context.outer_type = TYPE_MAIN_VARIANT (outer_type);
518  context.maybe_derived_type = false;
519  context.dynamic = false;
520  return context.restrict_to_inner_class (otr_type, consider_placement_new,
521					  consider_bases);
522}
523
524
525/* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
526   If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
527
528tree
529inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
530{
531  tree fn = block_ultimate_origin (block);
532  if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
533    return NULL_TREE;
534
535  if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
536      || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
537    {
538      if (!check_clones)
539	return NULL_TREE;
540
541      /* Watch for clones where we constant propagated the first
542	 argument (pointer to the instance).  */
543      fn = DECL_ABSTRACT_ORIGIN (fn);
544      if (!fn
545	  || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
546	  || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
547	return NULL_TREE;
548    }
549
550  if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
551    return NULL_TREE;
552
553  return fn;
554}
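
/* The blocks this function looks for typically come from inlining, e.g.
   (an illustrative sketch, not a reduced testcase):

     struct A { A () {} virtual void f () {} };
     struct B : A { B () {} virtual void f () {} };

     void
     test ()
     {
       B b;	// B::B () and the nested A::A () may both be inlined here
       b.f ();
     }

   After inlining, the statements that originated in A::A () and B::B () sit
   in BLOCKs whose ultimate origin is the respective constructor, which is
   exactly what inlined_polymorphic_ctor_dtor_block_p detects.  */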
555
556
/* We know that the instance is stored in a variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at the invocation of CALL.

   BASE represents the memory location where the instance is stored.
   If BASE is NULL, it is assumed to be global memory.
   OUTER_TYPE is the known type of the instance, or NULL if not
   known.

   For the variable to be in construction we actually need to
   be in the constructor of the corresponding global variable, or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */
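
/* As a hedged example of why the check matters (illustrative only):

     struct A { virtual void f () {} };
     struct B : A { B () {} virtual void f () {} };

     void test () { B b; }

   While b is being constructed its dynamic type passes through A (while
   A::A () runs) before becoming B, so a virtual call seen on the inline
   stack of one of the constructors cannot assume the final type.  */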
572
573bool
574decl_maybe_in_construction_p (tree base, tree outer_type,
575			      gimple call, tree function)
576{
577  if (outer_type)
578    outer_type = TYPE_MAIN_VARIANT (outer_type);
579  gcc_assert (!base || DECL_P (base));
580
  /* After inlining, code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
585  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
586    return true;
587
  /* Pure functions cannot make any changes to the dynamic type;
     that requires writing to memory.  */
590  if ((!base || !auto_var_in_fn_p (base, function))
591      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
592    return false;
593
594  bool check_clones = !base || is_global_var (base);
595  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
596       block = BLOCK_SUPERCONTEXT (block))
597    if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
598      {
599	tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
600
601	if (!outer_type || !types_odr_comparable (type, outer_type))
602	  {
603	    if (TREE_CODE (type) == RECORD_TYPE
604		&& TYPE_BINFO (type)
605		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
606	      return true;
607	  }
608 	else if (types_same_for_odr (type, outer_type))
609	  return true;
610      }
611
612  if (!base || (TREE_CODE (base) == VAR_DECL && is_global_var (base)))
613    {
614      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
615	  || (!DECL_CXX_CONSTRUCTOR_P (function)
616	      && !DECL_CXX_DESTRUCTOR_P (function)))
617	{
618	  if (!DECL_ABSTRACT_ORIGIN (function))
619	    return false;
620	  /* Watch for clones where we constant propagated the first
621	     argument (pointer to the instance).  */
622	  function = DECL_ABSTRACT_ORIGIN (function);
623	  if (!function
624	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
625	      || (!DECL_CXX_CONSTRUCTOR_P (function)
626		  && !DECL_CXX_DESTRUCTOR_P (function)))
627	    return false;
628	}
629      tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
630      if (!outer_type || !types_odr_comparable (type, outer_type))
631	{
632	  if (TREE_CODE (type) == RECORD_TYPE
633	      && TYPE_BINFO (type)
634	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
635	    return true;
636	}
637      else if (types_same_for_odr (type, outer_type))
638	return true;
639    }
640  return false;
641}
642
/* Dump a human-readable context to F.  If NEWLINE is true, it will be
   terminated by a newline.  */
645
646void
647ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
648{
649  fprintf (f, "    ");
650  if (invalid)
651    fprintf (f, "Call is known to be undefined");
652  else
653    {
654      if (useless_p ())
655	fprintf (f, "nothing known");
656      if (outer_type || offset)
657	{
658	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
659	  print_generic_expr (f, outer_type, TDF_SLIM);
660	  if (maybe_derived_type)
661	    fprintf (f, " (or a derived type)");
662	  if (maybe_in_construction)
663	    fprintf (f, " (maybe in construction)");
664	  fprintf (f, " offset "HOST_WIDE_INT_PRINT_DEC,
665		   offset);
666	}
667      if (speculative_outer_type)
668	{
669	  if (outer_type || offset)
670	    fprintf (f, " ");
671	  fprintf (f, "Speculative outer type:");
672	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
673	  if (speculative_maybe_derived_type)
674	    fprintf (f, " (or a derived type)");
675	  fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC,
676		   speculative_offset);
677	}
678    }
679  if (newline)
680    fprintf(f, "\n");
681}
682
683/* Print context to stderr.  */
684
685void
686ipa_polymorphic_call_context::debug () const
687{
688  dump (stderr);
689}
690
691/* Stream out the context to OB.  */
692
693void
694ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
695{
696  struct bitpack_d bp = bitpack_create (ob->main_stream);
697
698  bp_pack_value (&bp, invalid, 1);
699  bp_pack_value (&bp, maybe_in_construction, 1);
700  bp_pack_value (&bp, maybe_derived_type, 1);
701  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
702  bp_pack_value (&bp, dynamic, 1);
703  bp_pack_value (&bp, outer_type != NULL, 1);
704  bp_pack_value (&bp, offset != 0, 1);
705  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
706  streamer_write_bitpack (&bp);
707
708  if (outer_type != NULL)
709    stream_write_tree (ob, outer_type, true);
710  if (offset)
711    streamer_write_hwi (ob, offset);
712  if (speculative_outer_type != NULL)
713    {
714      stream_write_tree (ob, speculative_outer_type, true);
715      streamer_write_hwi (ob, speculative_offset);
716    }
717  else
718    gcc_assert (!speculative_offset);
719}
720
721/* Stream in the context from IB and DATA_IN.  */
722
723void
724ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
725					 struct data_in *data_in)
726{
727  struct bitpack_d bp = streamer_read_bitpack (ib);
728
729  invalid = bp_unpack_value (&bp, 1);
730  maybe_in_construction = bp_unpack_value (&bp, 1);
731  maybe_derived_type = bp_unpack_value (&bp, 1);
732  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
733  dynamic = bp_unpack_value (&bp, 1);
734  bool outer_type_p = bp_unpack_value (&bp, 1);
735  bool offset_p = bp_unpack_value (&bp, 1);
736  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
737
738  if (outer_type_p)
739    outer_type = stream_read_tree (ib, data_in);
740  else
741    outer_type = NULL;
742  if (offset_p)
743    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
744  else
745    offset = 0;
746  if (speculative_outer_type_p)
747    {
748      speculative_outer_type = stream_read_tree (ib, data_in);
749      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
750    }
751  else
752    {
753      speculative_outer_type = NULL;
754      speculative_offset = 0;
755    }
756}
757
/* Produce a polymorphic call context for a call of a method of an instance
   that is located within BASE (which is assumed to be a decl) at offset OFF. */
760
761void
762ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
763{
764  gcc_assert (DECL_P (base));
765  clear_speculation ();
766
767  if (!contains_polymorphic_type_p (TREE_TYPE (base)))
768    {
769      clear_outer_type ();
770      offset = off;
771      return;
772    }
773  outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
774  offset = off;
775  /* Make very conservative assumption that all objects
776     may be in construction.
777
778     It is up to caller to revisit this via
779     get_dynamic_type or decl_maybe_in_construction_p.  */
780  maybe_in_construction = true;
781  maybe_derived_type = false;
782  dynamic = false;
783}
784
/* CST is an invariant (address of a decl); try to get a meaningful
   polymorphic call context for a polymorphic call of a method
   of an instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */
789
790bool
791ipa_polymorphic_call_context::set_by_invariant (tree cst,
792						tree otr_type,
793						HOST_WIDE_INT off)
794{
795  HOST_WIDE_INT offset2, size, max_size;
796  tree base;
797
798  invalid = false;
799  off = 0;
800  clear_outer_type (otr_type);
801
802  if (TREE_CODE (cst) != ADDR_EXPR)
803    return false;
804
805  cst = TREE_OPERAND (cst, 0);
806  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
807  if (!DECL_P (base) || max_size == -1 || max_size != size)
808    return false;
809
810  /* Only type inconsistent programs can have otr_type that is
811     not part of outer type.  */
812  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
813    return false;
814
815  set_by_decl (base, off);
816  return true;
817}
818
/* See if OP is an SSA name initialized as a copy or by a single assignment.
   If so, walk the SSA graph up.  Because a simple conditional PHI is considered
   a copy, GLOBAL_VISITED may be used to avoid an infinite loop when walking
   the SSA graph.  */
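
/* For instance, given a GIMPLE fragment along these lines (illustrative
   only; the SSA names are made up):

     ptr_2 = ptr_1;
     # ptr_4 = PHI <ptr_2 (3), 0B (4)>
     ptr_5 = ptr_4;

   walking up from ptr_5 skips the trivial copies and the NULL arm of the
   PHI and arrives back at ptr_1.  */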
823
824static tree
825walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
826{
827  hash_set <tree> *visited = NULL;
828  STRIP_NOPS (op);
829  while (TREE_CODE (op) == SSA_NAME
830	 && !SSA_NAME_IS_DEFAULT_DEF (op)
831	 /* We might be called via fold_stmt during cfgcleanup where
832	    SSA form need not be up-to-date.  */
833	 && !name_registered_for_update_p (op)
834	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
835	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
836    {
837      if (global_visited)
838	{
839	  if (!*global_visited)
840	    *global_visited = new hash_set<tree>;
841	  if ((*global_visited)->add (op))
842	    goto done;
843	}
844      else
845	{
846	  if (!visited)
847	    visited = new hash_set<tree>;
848	  if (visited->add (op))
849	    goto done;
850	}
      /* Special case
	 if (ptr == 0)
	   ptr = 0;
	 else
	   ptr = ptr.foo;
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case where the pointer is NULL, because the call will
	 be undefined anyway.  */
860      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
861	{
862	  gimple phi = SSA_NAME_DEF_STMT (op);
863
864	  if (gimple_phi_num_args (phi) > 2)
865	    goto done;
866	  if (gimple_phi_num_args (phi) == 1)
867	    op = gimple_phi_arg_def (phi, 0);
868	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
869	    op = gimple_phi_arg_def (phi, 1);
870	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
871	    op = gimple_phi_arg_def (phi, 0);
872	  else
873	    goto done;
874	}
875      else
876	{
877	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
878	    goto done;
879	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
880	}
881      STRIP_NOPS (op);
882    }
883done:
884  if (visited)
885    delete (visited);
886  return op;
887}
888
/* Create a polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specifies the type of the polymorphic call or NULL if unknown;
   OFF is the offset of the call.  */
893
894ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
895							    tree otr_type,
896							    HOST_WIDE_INT off)
897{
898  clear_speculation ();
899  set_by_invariant (cst, otr_type, off);
900}
901
/* Build context for pointer REF contained in FNDECL at statement STMT.
   If INSTANCE is non-NULL, return a pointer to the object described by
   the context, or the DECL in which the context is contained.  */
905
906ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
907							    tree ref,
908							    gimple stmt,
909							    tree *instance)
910{
911  tree otr_type = NULL;
912  tree base_pointer;
913  hash_set <tree> *visited = NULL;
914
915  if (TREE_CODE (ref) == OBJ_TYPE_REF)
916    {
917      otr_type = obj_type_ref_class (ref);
918      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
919    }
920  else
921    base_pointer = ref;
922
923  /* Set up basic info in case we find nothing interesting in the analysis.  */
924  clear_speculation ();
925  clear_outer_type (otr_type);
926  invalid = false;
927
928  /* Walk SSA for outer object.  */
929  while (true)
930    {
931      base_pointer = walk_ssa_copies (base_pointer, &visited);
932      if (TREE_CODE (base_pointer) == ADDR_EXPR)
933	{
934	  HOST_WIDE_INT size, max_size;
935	  HOST_WIDE_INT offset2;
936	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
937					       &offset2, &size, &max_size);
938
939	  if (max_size != -1 && max_size == size)
940	    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
941				      offset + offset2,
942				      true,
943				      NULL /* Do not change outer type.  */);
944
945	  /* If this is a varying address, punt.  */
946	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
947	      && max_size != -1
948	      && max_size == size)
949	    {
	      /* We found a dereference of a pointer.  The type of the pointer
		 and of the MEM_REF is meaningless, but we can look further.  */
952	      if (TREE_CODE (base) == MEM_REF)
953		{
954		  base_pointer = TREE_OPERAND (base, 0);
955		  offset
956		    += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
957		  outer_type = NULL;
958		}
959	      /* We found base object.  In this case the outer_type
960		 is known.  */
961	      else if (DECL_P (base))
962		{
963		  if (visited)
964		    delete (visited);
965		  /* Only type inconsistent programs can have otr_type that is
966		     not part of outer type.  */
967		  if (otr_type
968		      && !contains_type_p (TREE_TYPE (base),
969					   offset + offset2, otr_type))
970		    {
971		      invalid = true;
972		      if (instance)
973			*instance = base_pointer;
974		      return;
975		    }
976		  set_by_decl (base, offset + offset2);
977		  if (outer_type && maybe_in_construction && stmt)
978		    maybe_in_construction
979		     = decl_maybe_in_construction_p (base,
980						     outer_type,
981						     stmt,
982						     fndecl);
983		  if (instance)
984		    *instance = base;
985		  return;
986		}
987	      else
988		break;
989	    }
990	  else
991	    break;
992	}
993      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
994	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
995	{
996	  offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
997		    * BITS_PER_UNIT;
998	  base_pointer = TREE_OPERAND (base_pointer, 0);
999	}
1000      else
1001	break;
1002    }
1003
1004  if (visited)
1005    delete (visited);
1006
1007  /* Try to determine type of the outer object.  */
1008  if (TREE_CODE (base_pointer) == SSA_NAME
1009      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1010      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1011    {
1012      /* See if parameter is THIS pointer of a method.  */
1013      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1014	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1015	{
1016	  outer_type
1017	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1018	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
1019		      || TREE_CODE (outer_type) == UNION_TYPE);
1020
	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case the outer type is less
	     informative than the inner type and we should forget
	     about it.  */
1025	  if ((otr_type
1026	       && !contains_type_p (outer_type, offset,
1027				    otr_type))
1028	      || !contains_polymorphic_type_p (outer_type))
1029	    {
1030	      outer_type = NULL;
1031	      if (instance)
1032		*instance = base_pointer;
1033	      return;
1034	    }
1035
1036	  dynamic = true;
1037
1038	  /* If the function is constructor or destructor, then
1039	     the type is possibly in construction, but we know
1040	     it is not derived type.  */
1041	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1042	      || DECL_CXX_DESTRUCTOR_P (fndecl))
1043	    {
1044	      maybe_in_construction = true;
1045	      maybe_derived_type = false;
1046	    }
1047	  else
1048	    {
1049	      maybe_derived_type = true;
1050	      maybe_in_construction = false;
1051	    }
1052	  if (instance)
1053	    *instance = base_pointer;
1054	  return;
1055	}
1056      /* Non-PODs passed by value are really passed by invisible
1057	 reference.  In this case we also know the type of the
1058	 object.  */
1059      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1060	{
1061	  outer_type
1062	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1063	  /* Only type inconsistent programs can have otr_type that is
1064	     not part of outer type.  */
1065	  if (otr_type && !contains_type_p (outer_type, offset,
1066					    otr_type))
1067	    {
1068	      invalid = true;
1069	      if (instance)
1070		*instance = base_pointer;
1071	      return;
1072	    }
1073	  /* Non-polymorphic types have no interest for us.  */
1074	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
1075	    {
1076	      outer_type = NULL;
1077	      if (instance)
1078		*instance = base_pointer;
1079	      return;
1080	    }
1081	  maybe_derived_type = false;
1082	  maybe_in_construction = false;
1083	  if (instance)
1084	    *instance = base_pointer;
1085	  return;
1086	}
1087    }
1088
1089  tree base_type = TREE_TYPE (base_pointer);
1090
1091  if (TREE_CODE (base_pointer) == SSA_NAME
1092      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1093      && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
1094	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
1095    {
1096      invalid = true;
1097      if (instance)
1098	*instance = base_pointer;
1099      return;
1100    }
1101  if (TREE_CODE (base_pointer) == SSA_NAME
1102      && SSA_NAME_DEF_STMT (base_pointer)
1103      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1104    base_type = TREE_TYPE (gimple_assign_rhs1
1105			    (SSA_NAME_DEF_STMT (base_pointer)));
1106
1107  if (base_type && POINTER_TYPE_P (base_type))
1108    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
1109			      offset,
1110			      true, NULL /* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not do this kind of flow-sensitive analysis yet.  */
1114  if (instance)
1115    *instance = base_pointer;
1116  return;
1117}
1118
1119/* Structure to be passed in between detect_type_change and
1120   check_stmt_for_type_change.  */
1121
1122struct type_change_info
1123{
1124  /* Offset into the object where there is the virtual method pointer we are
1125     looking for.  */
1126  HOST_WIDE_INT offset;
1127  /* The declaration or SSA_NAME pointer of the base that we are checking for
1128     type change.  */
1129  tree instance;
1130  /* The reference to virtual table pointer used.  */
1131  tree vtbl_ptr_ref;
1132  tree otr_type;
1133  /* If we actually can tell the type that the object has changed to, it is
1134     stored in this field.  Otherwise it remains NULL_TREE.  */
1135  tree known_current_type;
1136  HOST_WIDE_INT known_current_offset;
1137
1138  /* Set to true if dynamic type change has been detected.  */
1139  bool type_maybe_changed;
1140  /* Set to true if multiple types have been encountered.  known_current_type
1141     must be disregarded in that case.  */
1142  bool multiple_types_encountered;
1143  /* Set to true if we possibly missed some dynamic type changes and we should
1144     consider the set to be speculative.  */
1145  bool speculative;
1146  bool seen_unanalyzed_store;
1147};
1148
/* Return true if STMT is not a call and can modify a virtual method table
   pointer.  We take advantage of the fact that vtable stores must appear
   within constructor and destructor functions.  */
1152
1153static bool
1154noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
1155{
1156  if (is_gimple_assign (stmt))
1157    {
1158      tree lhs = gimple_assign_lhs (stmt);
1159
1160      if (gimple_clobber_p (stmt))
1161	return false;
1162      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1163	{
1164	  if (flag_strict_aliasing
1165	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1166	    return false;
1167
1168	  if (TREE_CODE (lhs) == COMPONENT_REF
1169	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1170	    return false;
1171	  /* In the future we might want to use get_base_ref_and_offset to find
1172	     if there is a field corresponding to the offset and if so, proceed
1173	     almost like if it was a component ref.  */
1174	}
1175    }
1176
1177  /* Code unification may mess with inline stacks.  */
1178  if (cfun->after_inlining)
1179    return true;
1180
1181  /* Walk the inline stack and watch out for ctors/dtors.
1182     TODO: Maybe we can require the store to appear in toplevel
1183     block of CTOR/DTOR.  */
1184  for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1185       block = BLOCK_SUPERCONTEXT (block))
1186    if (BLOCK_ABSTRACT_ORIGIN (block)
1187	&& TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
1188      return inlined_polymorphic_ctor_dtor_block_p (block, false);
1189  return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1190	  && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1191	      || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1192}
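
/* The store this predicate is after looks, once a constructor has been
   inlined, roughly like (an illustrative GIMPLE form; identifiers made up):

     MEM[(struct A *)this_2]._vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   Statements that provably cannot be such a store let the alias walk in
   check_stmt_for_type_change skip them cheaply.  */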
1193
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table can be
   identified, return the type.  Otherwise return NULL_TREE if the type changes
   in an unknown way, or ERROR_MARK_NODE if the type is unchanged.  */
1198
1199static tree
1200extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
1201			       HOST_WIDE_INT *type_offset)
1202{
1203  HOST_WIDE_INT offset, size, max_size;
1204  tree lhs, rhs, base;
1205
1206  if (!gimple_assign_single_p (stmt))
1207    return NULL_TREE;
1208
1209  lhs = gimple_assign_lhs (stmt);
1210  rhs = gimple_assign_rhs1 (stmt);
1211  if (TREE_CODE (lhs) != COMPONENT_REF
1212      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1213     {
1214	if (dump_file)
1215	  fprintf (dump_file, "  LHS is not virtual table.\n");
1216	return NULL_TREE;
1217     }
1218
1219  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1220    ;
1221  else
1222    {
1223      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1224      if (DECL_P (tci->instance))
1225	{
1226	  if (base != tci->instance)
1227	    {
1228	      if (dump_file)
1229		{
1230		  fprintf (dump_file, "    base:");
1231		  print_generic_expr (dump_file, base, TDF_SLIM);
1232		  fprintf (dump_file, " does not match instance:");
1233		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1234		  fprintf (dump_file, "\n");
1235		}
1236	      return NULL_TREE;
1237	    }
1238	}
1239      else if (TREE_CODE (base) == MEM_REF)
1240	{
1241	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
1242	    {
1243	      if (dump_file)
1244		{
1245		  fprintf (dump_file, "    base mem ref:");
1246		  print_generic_expr (dump_file, base, TDF_SLIM);
1247		  fprintf (dump_file, " does not match instance:");
1248		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1249		  fprintf (dump_file, "\n");
1250		}
1251	      return NULL_TREE;
1252	    }
1253	  if (!integer_zerop (TREE_OPERAND (base, 1)))
1254	    {
1255	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1256		{
1257		  if (dump_file)
1258		    {
1259		      fprintf (dump_file, "    base mem ref:");
1260		      print_generic_expr (dump_file, base, TDF_SLIM);
1261		      fprintf (dump_file, " has non-representable offset:");
1262		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1263		      fprintf (dump_file, "\n");
1264		    }
1265		  return NULL_TREE;
1266		}
1267	      else
1268	        offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
1269	    }
1270	}
1271      else if (!operand_equal_p (tci->instance, base, 0)
1272	       || tci->offset)
1273	{
1274	  if (dump_file)
1275	    {
1276	      fprintf (dump_file, "    base:");
1277	      print_generic_expr (dump_file, base, TDF_SLIM);
1278	      fprintf (dump_file, " does not match instance:");
1279	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1280	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1281	    }
1282	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
1283	}
1284      if (offset != tci->offset
1285	  || size != POINTER_SIZE
1286	  || max_size != POINTER_SIZE)
1287	{
1288	  if (dump_file)
1289	    fprintf (dump_file, "    wrong offset %i!=%i or size %i\n",
1290		     (int)offset, (int)tci->offset, (int)size);
1291	  return offset + POINTER_SIZE <= tci->offset
1292	         || (max_size != -1
1293		     && tci->offset + POINTER_SIZE > offset + max_size)
1294		 ? error_mark_node : NULL;
1295	}
1296    }
1297
1298  tree vtable;
1299  unsigned HOST_WIDE_INT offset2;
1300
1301  if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1302    {
1303      if (dump_file)
1304	fprintf (dump_file, "    Failed to lookup binfo\n");
1305      return NULL;
1306    }
1307
1308  tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1309					       offset2, vtable);
1310  if (!binfo)
1311    {
1312      if (dump_file)
1313	fprintf (dump_file, "    Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
1315      return NULL;
1316    }
1317
1318  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1319  return DECL_CONTEXT (vtable);
1320}
1321
1322/* Record dynamic type change of TCI to TYPE.  */
1323
1324static void
1325record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1326{
1327  if (dump_file)
1328    {
1329      if (type)
1330	{
1331          fprintf (dump_file, "  Recording type: ");
1332	  print_generic_expr (dump_file, type, TDF_SLIM);
1333          fprintf (dump_file, " at offset %i\n", (int)offset);
1334	}
1335     else
1336       fprintf (dump_file, "  Recording unknown type\n");
1337    }
1338
  /* If we found a constructor of a type that is not polymorphic or
     that may contain the type in question as a field (not as a base),
     restrict to the inner class first to make the type matching below
     happier.  */
1343  if (type
1344      && (offset
1345          || (TREE_CODE (type) != RECORD_TYPE
1346	      || !TYPE_BINFO (type)
1347	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1348    {
1349      ipa_polymorphic_call_context context;
1350
1351      context.offset = offset;
1352      context.outer_type = type;
1353      context.maybe_in_construction = false;
1354      context.maybe_derived_type = false;
1355      context.dynamic = true;
1356      /* If we failed to find the inner type, we know that the call
1357	 would be undefined for type produced here.  */
1358      if (!context.restrict_to_inner_class (tci->otr_type))
1359	{
1360	  if (dump_file)
1361	    fprintf (dump_file, "  Ignoring; does not contain otr_type\n");
1362	  return;
1363	}
      /* Watch for the case where we reached a POD type and anticipate
	 placement new.  */
1366      if (!context.maybe_derived_type)
1367	{
1368          type = context.outer_type;
1369          offset = context.offset;
1370	}
1371    }
1372  if (tci->type_maybe_changed
1373      && (!types_same_for_odr (type, tci->known_current_type)
1374	  || offset != tci->known_current_offset))
1375    tci->multiple_types_encountered = true;
1376  tci->known_current_type = TYPE_MAIN_VARIANT (type);
1377  tci->known_current_offset = offset;
1378  tci->type_maybe_changed = true;
1379}
1380
1381/* Callback of walk_aliased_vdefs and a helper function for
1382   detect_type_change to check whether a particular statement may modify
1383   the virtual table pointer, and if possible also determine the new type of
1384   the (sub-)object.  It stores its result into DATA, which points to a
1385   type_change_info structure.  */
1386
1387static bool
1388check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
1389{
1390  gimple stmt = SSA_NAME_DEF_STMT (vdef);
1391  struct type_change_info *tci = (struct type_change_info *) data;
1392  tree fn;
1393
1394  /* If we already gave up, just terminate the rest of walk.  */
1395  if (tci->multiple_types_encountered)
1396    return true;
1397
1398  if (is_gimple_call (stmt))
1399    {
1400      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
1401	return false;
1402
1403      /* Check for a constructor call.  */
1404      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
1405	  && DECL_CXX_CONSTRUCTOR_P (fn)
1406	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1407	  && gimple_call_num_args (stmt))
1408      {
1409	tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
1410	tree type = method_class_type (TREE_TYPE (fn));
1411	HOST_WIDE_INT offset = 0, size, max_size;
1412
1413	if (dump_file)
1414	  {
1415	    fprintf (dump_file, "  Checking constructor call: ");
1416	    print_gimple_stmt (dump_file, stmt, 0, 0);
1417	  }
1418
	/* See if the THIS parameter seems like the instance pointer.  */
1420	if (TREE_CODE (op) == ADDR_EXPR)
1421	  {
1422	    op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
1423					  &offset, &size, &max_size);
1424	    if (size != max_size || max_size == -1)
1425	      {
1426                tci->speculative = true;
1427	        return false;
1428	      }
1429	    if (op && TREE_CODE (op) == MEM_REF)
1430	      {
1431		if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
1432		  {
1433                    tci->speculative = true;
1434		    return false;
1435		  }
1436		offset += tree_to_shwi (TREE_OPERAND (op, 1))
1437			  * BITS_PER_UNIT;
1438		op = TREE_OPERAND (op, 0);
1439	      }
1440	    else if (DECL_P (op))
1441	      ;
1442	    else
1443	      {
1444                tci->speculative = true;
1445	        return false;
1446	      }
1447	    op = walk_ssa_copies (op);
1448	  }
1449	if (operand_equal_p (op, tci->instance, 0)
1450	    && TYPE_SIZE (type)
1451	    && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
1452	    && tree_fits_shwi_p (TYPE_SIZE (type))
1453	    && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
1454	    /* Some inlined constructors may look as follows:
1455		  _3 = operator new (16);
1456		  MEM[(struct  &)_3] ={v} {CLOBBER};
1457		  MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
1458		    = &MEM[(void *)&_ZTV14CompositeClass + 16B];
1459		  _7 = &MEM[(struct CompositeClass *)_3].object;
1460		  EmptyClass::EmptyClass (_7);
1461
	       When determining the dynamic type of _3, because we stop at the
	       first dynamic type found, we would stop on EmptyClass::EmptyClass
	       (_7).  In this case EmptyClass is not even polymorphic and we
	       would miss that it is contained in an outer type that is
	       polymorphic.  */
1466
1467	    && (tci->offset == offset || contains_polymorphic_type_p (type)))
1468	  {
1469	    record_known_type (tci, type, tci->offset - offset);
1470	    return true;
1471	  }
1472      }
     /* Calls may possibly change the dynamic type by placement new.  Assume
        it will not happen, but make the result speculative only.  */
1475     if (dump_file)
1476	{
1477          fprintf (dump_file, "  Function call may change dynamic type:");
1478	  print_gimple_stmt (dump_file, stmt, 0, 0);
1479	}
1480     tci->speculative = true;
1481     return false;
1482   }
1483  /* Check for inlined virtual table store.  */
1484  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
1485    {
1486      tree type;
1487      HOST_WIDE_INT offset = 0;
1488      if (dump_file)
1489	{
1490	  fprintf (dump_file, "  Checking vtbl store: ");
1491	  print_gimple_stmt (dump_file, stmt, 0, 0);
1492	}
1493
1494      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
1495      if (type == error_mark_node)
1496	return false;
1497      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
1498      if (!type)
1499	{
1500	  if (dump_file)
1501	    fprintf (dump_file, "  Unanalyzed store may change type.\n");
1502	  tci->seen_unanalyzed_store = true;
1503	  tci->speculative = true;
1504	}
1505      else
1506        record_known_type (tci, type, offset);
1507      return true;
1508    }
1509  else
1510    return false;
1511}
1512
/* THIS is a polymorphic call context obtained from get_polymorphic_context.
   OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
   INSTANCE is a pointer to the outer instance as returned by
   get_polymorphic_context.  To avoid creation of temporary expressions,
   INSTANCE may also be a declaration if get_polymorphic_context found the
   value to be in static storage.

   If the type of the instance is not fully determined
   (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
   is set), try to walk memory writes and find the actual construction of the
   instance.

   Return true if memory is unchanged from function entry.

   We do not include this analysis in the context analysis itself, because
   it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.  */
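
/* A minimal situation this analysis resolves (a hedged sketch):

     struct A { virtual void f () {} };

     void
     test ()
     {
       A a;	// the context built for the call below conservatively
		// has maybe_in_construction set
       a.f ();
     }

   Walking the virtual table stores backwards from the call proves that
   A::A () has already stored a's vptr, so the call may be devirtualized
   to A::f.  */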
1530
1531bool
1532ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1533						tree otr_object,
1534						tree otr_type,
1535						gimple call)
1536{
1537  struct type_change_info tci;
1538  ao_ref ao;
1539  bool function_entry_reached = false;
1540  tree instance_ref = NULL;
1541  gimple stmt = call;
1542  /* Remember OFFSET before it is modified by restrict_to_inner_class.
1543     This is because we do not update INSTANCE when walking inwards.  */
1544  HOST_WIDE_INT instance_offset = offset;
1545  tree instance_outer_type = outer_type;
1546
1547  if (otr_type)
1548    otr_type = TYPE_MAIN_VARIANT (otr_type);
1549
  /* Walk into the inner type.  This may clear maybe_derived_type and save us
     from useless work.  It also makes later comparisons with the static type
     easier.  */
1553  if (outer_type && otr_type)
1554    {
1555      if (!restrict_to_inner_class (otr_type))
1556        return false;
1557    }
1558
1559  if (!maybe_in_construction && !maybe_derived_type)
1560    return false;
1561
  /* We need to obtain a reference to the virtual table pointer.  It is better
     to look it up in the code rather than build our own.  This requires a bit
     of pattern matching, but we end up verifying that what we found is
     correct.
1566
1567     What we pattern match is:
1568
1569       tmp = instance->_vptr.A;   // vtbl ptr load
1570       tmp2 = tmp[otr_token];	  // vtable lookup
1571       OBJ_TYPE_REF(tmp2;instance->0) (instance);
1572
1573     We want to start alias oracle walk from vtbl pointer load,
1574     but we may not be able to identify it, for example, when PRE moved the
1575     load around.  */
1576
1577  if (gimple_code (call) == GIMPLE_CALL)
1578    {
1579      tree ref = gimple_call_fn (call);
1580      HOST_WIDE_INT offset2, size, max_size;
1581
1582      if (TREE_CODE (ref) == OBJ_TYPE_REF)
1583	{
1584	  ref = OBJ_TYPE_REF_EXPR (ref);
1585	  ref = walk_ssa_copies (ref);
1586
1587	  /* Check if definition looks like vtable lookup.  */
1588	  if (TREE_CODE (ref) == SSA_NAME
1589	      && !SSA_NAME_IS_DEFAULT_DEF (ref)
1590	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1591	      && TREE_CODE (gimple_assign_rhs1
1592			     (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1593	    {
1594	      ref = get_base_address
1595		     (TREE_OPERAND (gimple_assign_rhs1
1596				     (SSA_NAME_DEF_STMT (ref)), 0));
1597	      ref = walk_ssa_copies (ref);
1598	      /* Find base address of the lookup and see if it looks like
1599		 vptr load.  */
1600	      if (TREE_CODE (ref) == SSA_NAME
1601		  && !SSA_NAME_IS_DEFAULT_DEF (ref)
1602		  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1603		{
1604		  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1605		  tree base_ref = get_ref_base_and_extent
1606				   (ref_exp, &offset2, &size, &max_size);
1607
		  /* Finally verify that what we found looks like a read from
		     OTR_OBJECT or from INSTANCE with offset OFFSET.  */
1610		  if (base_ref
1611		      && ((TREE_CODE (base_ref) == MEM_REF
1612		           && ((offset2 == instance_offset
1613		                && TREE_OPERAND (base_ref, 0) == instance)
1614			       || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
1615			  || (DECL_P (instance) && base_ref == instance
1616			      && offset2 == instance_offset)))
1617		    {
1618		      stmt = SSA_NAME_DEF_STMT (ref);
1619		      instance_ref = ref_exp;
1620		    }
1621		}
1622	    }
1623	}
1624    }
1625
1626  /* If we failed to look up the reference in code, build our own.  */
1627  if (!instance_ref)
1628    {
1629      /* If the statement in question does not use memory, we can't tell
1630	 anything.  */
1631      if (!gimple_vuse (stmt))
1632	return false;
1633      ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1634    }
1635  else
1636  /* Otherwise use the real reference.  */
1637    ao_ref_init (&ao, instance_ref);
1638
1639  /* We look for vtbl pointer read.  */
1640  ao.size = POINTER_SIZE;
1641  ao.max_size = ao.size;
1642  if (otr_type)
1643    ao.ref_alias_set
1644      = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1645
1646  if (dump_file)
1647    {
1648      fprintf (dump_file, "Determining dynamic type for call: ");
1649      print_gimple_stmt (dump_file, call, 0, 0);
1650      fprintf (dump_file, "  Starting walk at: ");
1651      print_gimple_stmt (dump_file, stmt, 0, 0);
1652      fprintf (dump_file, "  instance pointer: ");
1653      print_generic_expr (dump_file, otr_object, TDF_SLIM);
1654      fprintf (dump_file, "  Outer instance pointer: ");
1655      print_generic_expr (dump_file, instance, TDF_SLIM);
1656      fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
1657      fprintf (dump_file, " vtbl reference: ");
1658      print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1659      fprintf (dump_file, "\n");
1660    }
1661
1662  tci.offset = instance_offset;
1663  tci.instance = instance;
1664  tci.vtbl_ptr_ref = instance_ref;
1665  gcc_assert (TREE_CODE (instance) != MEM_REF);
1666  tci.known_current_type = NULL_TREE;
1667  tci.known_current_offset = 0;
1668  tci.otr_type = otr_type;
1669  tci.type_maybe_changed = false;
1670  tci.multiple_types_encountered = false;
1671  tci.speculative = false;
1672  tci.seen_unanalyzed_store = false;
1673
1674  walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1675		      &tci, NULL, &function_entry_reached);
1676
1677  /* If we did not find any type changing statements, we may still drop
1678     the maybe_in_construction flag if the context already has an outer type.
1679
1680     Here we make special assumptions about both constructors and
1681     destructors, which are the only functions that are allowed to alter
1682     the VMT pointers.  It assumes that destructors begin with an
1683     assignment to all VMT pointers and that constructors essentially
1684     look the following way:
1685
1686     1) The very first thing they do is that they call constructors of
1687     ancestor sub-objects that have them.
1688
1689     2) Then the VMT pointers of this object and all its ancestors are set
1690     to the new values corresponding to the constructor's type.
1691
1692     3) Only afterwards is other stuff, such as the constructors of member
1693     sub-objects and the code written by the user, run.  Only this part may
1694     include calling virtual functions, directly or indirectly.
1695
1696     4) Placement new cannot be used to change the type of statically
1697     allocated non-POD variables.
1698
1699     There is no other way to call a constructor of an ancestor
1700     sub-object.
1701
1702     This means that we do not have to care whether constructors get the
1703     correct type information because they will always change it (in fact,
1704     if we define the type to be given by the VMT pointer, it is undefined).
1705
1706     The most important fact to derive from the above is that if, for some
1707     statement in the section 3, we try to detect whether the dynamic type
1708     has changed, we can safely ignore all calls as we examine the function
1709     body backwards until we reach statements in section 2 because these
1710     calls cannot be ancestor constructors or destructors (if the input is
1711     not bogus) and so do not change the dynamic type (this holds true only
1712     for automatically allocated objects but at the moment we devirtualize
1713     only these).  We then must detect that statements in section 2 change
1714     the dynamic type and can try to derive the new type.  That is enough
1715     and we can stop, we will never see the calls into constructors of
1716     sub-objects in this code.
1717
1718     Therefore, if the static outer type was found (outer_type), we can
1719     safely ignore tci.speculative, which is set on calls, and give up only
1720     if there was a dynamic type store that may affect the given variable
1721     (seen_unanalyzed_store).  */
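
  /* A minimal illustrative sketch of the assumptions above (the class
     names are made up and the stores are only approximate GIMPLE):

       struct A { virtual void f (); };
       struct B : A { B (); virtual void f (); };

     the body of B::B () is expected to be laid out as

       A::A (this);                       // section 1: ancestor constructors
       this->_vptr = &_ZTV1B;             // section 2: VMT pointer stores
       ... user code, may call f () ...   // section 3

     so a backward walk from a call in section 3 may ignore calls until it
     reaches the store in section 2, which determines the dynamic type.  */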
1722
1723  if (!tci.type_maybe_changed
1724      || (outer_type
1725	  && !dynamic
1726	  && !tci.seen_unanalyzed_store
1727	  && !tci.multiple_types_encountered
1728	  && ((offset == tci.offset
1729	       && types_same_for_odr (tci.known_current_type,
1730				      outer_type))
1731	       || (instance_offset == offset
1732		   && types_same_for_odr (tci.known_current_type,
1733					  instance_outer_type)))))
1734    {
1735      if (!outer_type || tci.seen_unanalyzed_store)
1736	return false;
1737      if (maybe_in_construction)
1738        maybe_in_construction = false;
1739      if (dump_file)
1740	fprintf (dump_file, "  No dynamic type change found.\n");
1741      return true;
1742    }
1743
1744  if (tci.known_current_type
1745      && !function_entry_reached
1746      && !tci.multiple_types_encountered)
1747    {
1748      if (!tci.speculative)
1749	{
1750	  outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1751	  offset = tci.known_current_offset;
1752	  dynamic = true;
1753	  maybe_in_construction = false;
1754	  maybe_derived_type = false;
1755	  if (dump_file)
1756	    fprintf (dump_file, "  Determined dynamic type.\n");
1757	}
1758      else if (!speculative_outer_type
1759	       || speculative_maybe_derived_type)
1760	{
1761	  speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1762	  speculative_offset = tci.known_current_offset;
1763	  speculative_maybe_derived_type = false;
1764	  if (dump_file)
1765	    fprintf (dump_file, "  Determined speculative dynamic type.\n");
1766	}
1767    }
1768  else if (dump_file)
1769    {
1770      fprintf (dump_file, "  Found multiple types%s%s\n",
1771	       function_entry_reached ? " (function entry reached)" : "",
1772	       tci.multiple_types_encountered
		 ? " (multiple types encountered)" : "");
1773    }
1774
1775  return false;
1776}
1777
1778/* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and
1779   SPEC_MAYBE_DERIVED_TYPE seems consistent (and useful) with what we
   already have in the non-speculative context.  */
1780
1781bool
1782ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1783							HOST_WIDE_INT spec_offset,
1784							bool spec_maybe_derived_type,
1785							tree otr_type) const
1786{
1787  if (!flag_devirtualize_speculatively)
1788    return false;
1789
1790  /* Non-polymorphic types are useless for deriving likely polymorphic
1791     call targets.  */
1792  if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1793    return false;
1794
1795  /* If we know nothing, speculation is always good.  */
1796  if (!outer_type)
1797    return true;
1798
1799  /* Speculation is only useful to avoid derived types.
1800     This is not 100% true for placement new, where the outer context may
1801     turn out to be useless, but ignore these for now.  */
1802  if (!maybe_derived_type)
1803    return false;
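
  /* Illustrative sketch (hypothetical types): for

       struct A { virtual void f (); };
       struct B : A { virtual void f (); };
       void call (A *a) { a->f (); }

     the non-speculative context of *a is only "A, possibly a derived
     type", so speculating "exactly B" narrows the likely target to B::f.
     If the outer type were already exact, speculation could add
     nothing.  */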
1804
1805  /* If the types agree, speculation is consistent, but it makes sense only
1806     when it says something new.  */
1807  if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1808    return maybe_derived_type && !spec_maybe_derived_type;
1809
1810  /* If speculation does not contain the type in question, ignore it.  */
1811  if (otr_type
1812      && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1813    return false;
1814
1815  /* If the outer type already contains the speculation as a field,
1816     it is useless.  We already know SPEC_OUTER_TYPE from OUTER_TYPE and
1817     that it is not in construction.  */
1818  if (contains_type_p (outer_type, offset - spec_offset,
1819		       spec_outer_type, false, false))
1820    return false;
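
  /* For example (hypothetical types), if OUTER_TYPE is

       struct C { int x; A a; };

     and the speculation names A at the offset of the field C::a, the
     non-speculative context already describes that subobject at least as
     precisely, so the speculation is redundant.  */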
1821
1822  /* If the speculative outer type is not more specific than the outer
1823     type, just give up.
1824     We can only decide this safely if we can compare the types with
1825     OUTER_TYPE.  */
1826  if ((!in_lto_p || odr_type_p (outer_type))
1827      && !contains_type_p (spec_outer_type,
1828			   spec_offset - offset,
1829			   outer_type, false))
1830    return false;
1831  return true;
1832}
1833
1834/* Improve THIS with the speculation described by NEW_OUTER_TYPE, NEW_OFFSET
1835   and NEW_MAYBE_DERIVED_TYPE.
1836   If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */
1837
1838bool
1839ipa_polymorphic_call_context::combine_speculation_with
1840   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1841    tree otr_type)
1842{
1843  if (!new_outer_type)
1844    return false;
1845
1846  /* restrict_to_inner_class may eliminate wrong speculation, making our job
1847     easier.  */
1848  if (otr_type)
1849    restrict_to_inner_class (otr_type);
1850
1851  if (!speculation_consistent_p (new_outer_type, new_offset,
1852				 new_maybe_derived_type, otr_type))
1853    return false;
1854
1855  /* New speculation is a win in case we have no speculation or new
1856     speculation does not consider derivations.  */
1857  if (!speculative_outer_type
1858      || (speculative_maybe_derived_type
1859	  && !new_maybe_derived_type))
1860    {
1861      speculative_outer_type = new_outer_type;
1862      speculative_offset = new_offset;
1863      speculative_maybe_derived_type = new_maybe_derived_type;
1864      return true;
1865    }
1866  else if (types_must_be_same_for_odr (speculative_outer_type,
1867				       new_outer_type))
1868    {
1869      if (speculative_offset != new_offset)
1870	{
1871	  /* OK, we have two contexts that seem valid but they disagree,
1872	     so just give up.
1873
1874	     This is not a lattice operation, so we may want to drop it later.  */
1875	  if (dump_file && (dump_flags & TDF_DETAILS))
1876	    fprintf (dump_file,
1877		     "Speculative outer types match, "
1878		     "offset mismatch -> invalid speculation\n");
1879	  clear_speculation ();
1880	  return true;
1881	}
1882      else
1883	{
1884	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
1885	    {
1886	      speculative_maybe_derived_type = false;
1887	      return true;
1888	    }
1889	  else
1890	    return false;
1891	}
1892    }
1893  /* Choose the type that contains the other.  The new type either contains
1894     the old one as a field (thus giving exactly one target) or is deeper
1895     in the type hierarchy.  */
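  /* For instance (hypothetical types), if the old speculation is A and
     the new one, at the same offset, is

       struct D : A { virtual void f (); };	// A is a base at offset 0

     then D is preferred: every object of dynamic type D is also an A, so
     D is the deeper and more precise guess.  */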
1896  else if (speculative_outer_type
1897	   && speculative_maybe_derived_type
1898	   && (new_offset > speculative_offset
1899	       || (new_offset == speculative_offset
1900		   && contains_type_p (new_outer_type,
1901				       0, speculative_outer_type, false))))
1902    {
1903      tree old_outer_type = speculative_outer_type;
1904      HOST_WIDE_INT old_offset = speculative_offset;
1905      bool old_maybe_derived_type = speculative_maybe_derived_type;
1906
1907      speculative_outer_type = new_outer_type;
1908      speculative_offset = new_offset;
1909      speculative_maybe_derived_type = new_maybe_derived_type;
1910
1911      if (otr_type)
1912	restrict_to_inner_class (otr_type);
1913
1914      /* If the speculation turned out to make no sense, revert to the
1915	 previous one.  */
1916      if (!speculative_outer_type)
1917	{
1918	  speculative_outer_type = old_outer_type;
1919	  speculative_offset = old_offset;
1920	  speculative_maybe_derived_type = old_maybe_derived_type;
1921	  return false;
1922	}
1923      return (old_offset != speculative_offset
1924	      || old_maybe_derived_type != speculative_maybe_derived_type
1925	      || types_must_be_same_for_odr (speculative_outer_type,
1926					     new_outer_type));
1927    }
1928  return false;
1929}
1930
1931/* Make the speculation less specific so that
1932   NEW_OUTER_TYPE, NEW_OFFSET and NEW_MAYBE_DERIVED_TYPE are also included.
1933   If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */
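
/* For example (an illustrative case), if the current speculation is
   "exactly B" and the incoming one is "B, possibly a derived type", the
   result has to cover both, so only SPECULATIVE_MAYBE_DERIVED_TYPE gets
   set.  */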
1934
1935bool
1936ipa_polymorphic_call_context::meet_speculation_with
1937   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1938    tree otr_type)
1939{
1940  if (!new_outer_type && speculative_outer_type)
1941    {
1942      clear_speculation ();
1943      return true;
1944    }
1945
1946  /* restrict_to_inner_class may eliminate wrong speculation, making our job
1947     easier.  */
1948  if (otr_type)
1949    restrict_to_inner_class (otr_type);
1950
1951  if (!speculative_outer_type
1952      || !speculation_consistent_p (speculative_outer_type,
1953				    speculative_offset,
1954				    speculative_maybe_derived_type,
1955				    otr_type))
1956    return false;
1957
1958  if (!speculation_consistent_p (new_outer_type, new_offset,
1959				 new_maybe_derived_type, otr_type))
1960    {
1961      clear_speculation ();
1962      return true;
1963    }
1964
1965  else if (types_must_be_same_for_odr (speculative_outer_type,
1966				       new_outer_type))
1967    {
1968      if (speculative_offset != new_offset)
1969	{
1970	  clear_speculation ();
1971	  return true;
1972	}
1973      else
1974	{
1975	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
1976	    {
1977	      speculative_maybe_derived_type = true;
1978	      return true;
1979	    }
1980	  else
1981	    return false;
1982	}
1983    }
1984  /* See if one type contains the other as a field (not as a base).  */
1985  else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
1986			    speculative_outer_type, false, false))
1987    return false;
1988  else if (contains_type_p (speculative_outer_type,
1989			    speculative_offset - new_offset,
1990			    new_outer_type, false, false))
1991    {
1992      speculative_outer_type = new_outer_type;
1993      speculative_offset = new_offset;
1994      speculative_maybe_derived_type = new_maybe_derived_type;
1995      return true;
1996    }
1997  /* See if SPECULATIVE_OUTER_TYPE is a base of NEW_OUTER_TYPE.  */
1998  else if (contains_type_p (new_outer_type,
1999			    new_offset - speculative_offset,
2000			    speculative_outer_type, false, true))
2001    {
2002      if (!speculative_maybe_derived_type)
2003	{
2004	  speculative_maybe_derived_type = true;
2005	  return true;
2006	}
2007      return false;
2008    }
2009  /* See if NEW_OUTER_TYPE is a base of SPECULATIVE_OUTER_TYPE.  */
2010  else if (contains_type_p (speculative_outer_type,
2011			    speculative_offset - new_offset, new_outer_type, false, true))
2012    {
2013      speculative_outer_type = new_outer_type;
2014      speculative_offset = new_offset;
2015      speculative_maybe_derived_type = true;
2016      return true;
2017    }
2018  else
2019    {
2020      if (dump_file && (dump_flags & TDF_DETAILS))
2021        fprintf (dump_file, "Giving up on speculative meet\n");
2022      clear_speculation ();
2023      return true;
2024    }
2025}
2026
2027/* Assume that both THIS and the given context CTX are valid and strengthen
2028   THIS if possible.  Return true if any strengthening was made.
2029   If the actual type the context is being used with is known, OTR_TYPE
2030   should be set accordingly.  This improves the quality of the combined
   result.  */
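
/* An illustrative sketch (hypothetical hierarchy): combining a context
   that says "A, possibly a derived type, possibly in construction" with
   one that says "exactly A" strengthens THIS to "exactly A", while two
   exact but different outer types make the context invalid when
   OTR_TYPE is known.  */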
2031
2032bool
2033ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
2034					    tree otr_type)
2035{
2036  bool updated = false;
2037
2038  if (ctx.useless_p () || invalid)
2039    return false;
2040
2041  /* Restricting the context to the inner type makes merging easier; however,
2042     do not do that unless we know how the context is used (OTR_TYPE is
     non-NULL).  */
2043  if (otr_type && !invalid && !ctx.invalid)
2044    {
2045      restrict_to_inner_class (otr_type);
2046      ctx.restrict_to_inner_class (otr_type);
2047      if (invalid)
2048        return false;
2049    }
2050
2051  if (dump_file && (dump_flags & TDF_DETAILS))
2052    {
2053      fprintf (dump_file, "Polymorphic call context combine:");
2054      dump (dump_file);
2055      fprintf (dump_file, "With context:                    ");
2056      ctx.dump (dump_file);
2057      if (otr_type)
2058	{
2059          fprintf (dump_file, "To be used with type:            ");
2060	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
2061          fprintf (dump_file, "\n");
2062	}
2063    }
2064
2065  /* If call is known to be invalid, we are done.  */
2066  if (ctx.invalid)
2067    {
2068      if (dump_file && (dump_flags & TDF_DETAILS))
2069        fprintf (dump_file, "-> Invalid context\n");
2070      goto invalidate;
2071    }
2072
2073  if (!ctx.outer_type)
2074    ;
2075  else if (!outer_type)
2076    {
2077      outer_type = ctx.outer_type;
2078      offset = ctx.offset;
2079      dynamic = ctx.dynamic;
2080      maybe_in_construction = ctx.maybe_in_construction;
2081      maybe_derived_type = ctx.maybe_derived_type;
2082      updated = true;
2083    }
2084  /* If types are known to be same, merging is quite easy.  */
2085  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2086    {
2087      if (offset != ctx.offset
2088	  && TYPE_SIZE (outer_type)
2089	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2090	{
2091	  if (dump_file && (dump_flags & TDF_DETAILS))
2092	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
2093	  clear_speculation ();
2094	  clear_outer_type ();
2095	  invalid = true;
2096	  return true;
2097	}
2098      if (dump_file && (dump_flags & TDF_DETAILS))
2099        fprintf (dump_file, "Outer types match, merging flags\n");
2100      if (maybe_in_construction && !ctx.maybe_in_construction)
2101	{
2102	  updated = true;
2103	  maybe_in_construction = false;
2104	}
2105      if (maybe_derived_type && !ctx.maybe_derived_type)
2106	{
2107	  updated = true;
2108	  maybe_derived_type = false;
2109	}
2110      if (dynamic && !ctx.dynamic)
2111	{
2112	  updated = true;
2113	  dynamic = false;
2114	}
2115    }
2116  /* If we know the type precisely, there is not much to improve.  */
2117  else if (!maybe_derived_type && !maybe_in_construction
2118	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
2119    {
2120      /* It may be possible to check whether the second context permits the
2121	 first and set INVALID otherwise, but this is not easy to do in
2122	 general; contains_type_p may return false negatives for
2123	 non-comparable types.
2124
2125	 If OTR_TYPE is known, however, we can expect that
2126	 restrict_to_inner_class should have discovered the same base
2127	 type.  */
2128      if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
2129	{
2130	  if (dump_file && (dump_flags & TDF_DETAILS))
2131	    fprintf (dump_file, "Contexts disagree -> invalid\n");
2132	  goto invalidate;
2133	}
2134    }
2135  /* See if one type contains the other as a field (not as a base).
2136     In this case we want to choose the wider type, because it contains
2137     more information.  */
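  /* For example (hypothetical types), if THIS names A (possibly derived)
     and CTX names

       struct E { int pad; A a; };

     with the offsets pointing into the field E::a, CTX carries more
     information since it also identifies the enclosing object, so we
     prefer it.  */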
2138  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2139			    outer_type, false, false))
2140    {
2141      if (dump_file && (dump_flags & TDF_DETAILS))
2142	fprintf (dump_file, "Second type contains the first as a field\n");
2143
2144      if (maybe_derived_type)
2145	{
2146	  outer_type = ctx.outer_type;
2147	  maybe_derived_type = ctx.maybe_derived_type;
2148	  offset = ctx.offset;
2149	  dynamic = ctx.dynamic;
2150	  updated = true;
2151	}
2152
2153      /* If we do not know how the context is being used, we cannot clear
2154	 MAYBE_IN_CONSTRUCTION because it may be offset to another component
2155	 of OUTER_TYPE later and we know nothing about it.  */
2157      if (otr_type && maybe_in_construction
2158	  && !ctx.maybe_in_construction)
2159	{
2160          maybe_in_construction = false;
2161	  updated = true;
2162	}
2163    }
2164  else if (contains_type_p (outer_type, offset - ctx.offset,
2165			    ctx.outer_type, false, false))
2166    {
2167      if (dump_file && (dump_flags & TDF_DETAILS))
2168	fprintf (dump_file, "First type contains the second as a field\n");
2169
2170      if (otr_type && maybe_in_construction
2171	  && !ctx.maybe_in_construction)
2172	{
2173          maybe_in_construction = false;
2174	  updated = true;
2175	}
2176    }
2177  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
2178  else if (contains_type_p (ctx.outer_type,
2179			    ctx.offset - offset, outer_type, false, true))
2180    {
2181      if (dump_file && (dump_flags & TDF_DETAILS))
2182	fprintf (dump_file, "First type is base of second\n");
2183      if (!maybe_derived_type)
2184	{
2185	  if (!ctx.maybe_in_construction
2186	      && types_odr_comparable (outer_type, ctx.outer_type))
2187	    {
2188	      if (dump_file && (dump_flags & TDF_DETAILS))
2189		fprintf (dump_file, "Second context does not permit base -> invalid\n");
2190	      goto invalidate;
2191	    }
2192	}
2193      /* Pick the variant deeper in the hierarchy.  */
2194      else
2195	{
2196	  outer_type = ctx.outer_type;
2197	  maybe_in_construction = ctx.maybe_in_construction;
2198	  maybe_derived_type = ctx.maybe_derived_type;
2199	  offset = ctx.offset;
2200	  dynamic = ctx.dynamic;
2201          updated = true;
2202	}
2203    }
2204  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
2205  else if (contains_type_p (outer_type,
2206			    offset - ctx.offset, ctx.outer_type, false, true))
2207    {
2208      if (dump_file && (dump_flags & TDF_DETAILS))
2209	fprintf (dump_file, "Second type is base of first\n");
2210      if (!ctx.maybe_derived_type)
2211	{
2212	  if (!maybe_in_construction
2213	      && types_odr_comparable (outer_type, ctx.outer_type))
2214	    {
2215	      if (dump_file && (dump_flags & TDF_DETAILS))
2216		fprintf (dump_file, "First context does not permit base -> invalid\n");
2217	      goto invalidate;
2218	    }
2219	  /* Pick the base type.  */
2220	  else if (maybe_in_construction)
2221	    {
2222	      outer_type = ctx.outer_type;
2223	      maybe_in_construction = ctx.maybe_in_construction;
2224	      maybe_derived_type = ctx.maybe_derived_type;
2225	      offset = ctx.offset;
2226	      dynamic = ctx.dynamic;
2227	      updated = true;
2228	    }
2229	}
2230    }
2231  /* TODO: handle merging using the type hierarchy.  */
2232  else if (dump_file && (dump_flags & TDF_DETAILS))
2233    fprintf (dump_file, "Giving up on merge\n");
2234
2235  updated |= combine_speculation_with (ctx.speculative_outer_type,
2236				       ctx.speculative_offset,
2237				       ctx.speculative_maybe_derived_type,
2238				       otr_type);
2239
2240  if (updated && dump_file && (dump_flags & TDF_DETAILS))
2241    {
2242      fprintf (dump_file, "Updated as:                      ");
2243      dump (dump_file);
2244      fprintf (dump_file, "\n");
2245    }
2246  return updated;
2247
2248invalidate:
2249  invalid = true;
2250  clear_speculation ();
2251  clear_outer_type ();
2252  return true;
2253}
2254
2255/* Take the non-speculative info, merge it with the speculative info and
2256   clear the speculation.  Used when we no longer manage to keep track of
2257   the actual outer type, but we think it is still there.
2258
2259   If OTR_TYPE is set, the transformation can be done more effectively
2260   assuming that the context is going to be used only that way.  */
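
/* An illustrative (hypothetical) use, with B derived from polymorphic A:

     A *p = new B;	     // dynamic type known to be exactly B
     unknown_function (p);   // may change the dynamic type of *p
     p->f ();		     // keep "B" only as a speculation

   after the unanalyzed call the fact is demoted to a speculation that can
   still guide speculative devirtualization.  */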
2261
2262void
2263ipa_polymorphic_call_context::make_speculative (tree otr_type)
2264{
2265  tree spec_outer_type = outer_type;
2266  HOST_WIDE_INT spec_offset = offset;
2267  bool spec_maybe_derived_type = maybe_derived_type;
2268
2269  if (invalid)
2270    {
2271      invalid = false;
2272      clear_outer_type ();
2273      clear_speculation ();
2274      return;
2275    }
2276  if (!outer_type)
2277    return;
2278  clear_outer_type ();
2279  combine_speculation_with (spec_outer_type, spec_offset,
2280			    spec_maybe_derived_type,
2281			    otr_type);
2282}
2283
2284/* Used when we cannot track a dynamic type change.  This speculatively
2285   assumes that the type change is not happening.  */
2286
2287void
2288ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2289							    tree otr_type)
2290{
2291  if (dynamic)
2292    make_speculative (otr_type);
2293  else if (in_poly_cdtor)
2294    maybe_in_construction = true;
2295}
2296
2297/* Return TRUE if this context conveys the same information as context X.  */
2298
2299bool
2300ipa_polymorphic_call_context::equal_to
2301    (const ipa_polymorphic_call_context &x) const
2302{
2303  if (useless_p ())
2304    return x.useless_p ();
2305  if (invalid)
2306    return x.invalid;
2307  if (x.useless_p () || x.invalid)
2308    return false;
2309
2310  if (outer_type)
2311    {
2312      if (!x.outer_type
2313	  || !types_odr_comparable (outer_type, x.outer_type)
2314	  || !types_same_for_odr (outer_type, x.outer_type)
2315	  || offset != x.offset
2316	  || maybe_in_construction != x.maybe_in_construction
2317	  || maybe_derived_type != x.maybe_derived_type
2318	  || dynamic != x.dynamic)
2319	return false;
2320    }
2321  else if (x.outer_type)
2322    return false;
2323
2325  if (speculative_outer_type
2326      && speculation_consistent_p (speculative_outer_type, speculative_offset,
2327				   speculative_maybe_derived_type, NULL_TREE))
2328    {
2329      if (!x.speculative_outer_type)
2330	return false;
2331
2332      if (!types_odr_comparable (speculative_outer_type,
2333				 x.speculative_outer_type)
2334	  || !types_same_for_odr  (speculative_outer_type,
2335				   x.speculative_outer_type)
2336	  || speculative_offset != x.speculative_offset
2337	  || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2338	return false;
2339    }
2340  else if (x.speculative_outer_type
2341	   && x.speculation_consistent_p (x.speculative_outer_type,
2342					  x.speculative_offset,
2343				  	  x.speculative_maybe_derived_type,
2344					  NULL_TREE))
2345    return false;
2346
2347  return true;
2348}
2349
2350/* Modify the context so that it is no more restrictive than CTX, i.e.
   make THIS a conservative meet of the two.  Return true if any change
   was made.  */
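
/* For example (an illustrative case), meeting "exactly A" with "A,
   possibly a derived type" has to describe both possibilities, so the
   result is "A, possibly a derived type".  */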
2351
2352bool
2353ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
2354					 tree otr_type)
2355{
2356  bool updated = false;
2357
2358  if (useless_p () || ctx.invalid)
2359    return false;
2360
2361  /* Restricting the context to the inner type makes merging easier; however,
2362     do not do that unless we know how the context is used (OTR_TYPE is
     non-NULL).  */
2363  if (otr_type && !useless_p () && !ctx.useless_p ())
2364    {
2365      restrict_to_inner_class (otr_type);
2366      ctx.restrict_to_inner_class (otr_type);
2367      if (invalid)
2368        return false;
2369    }
2370
2371  if (equal_to (ctx))
2372    return false;
2373
2374  if (ctx.useless_p () || invalid)
2375    {
2376      *this = ctx;
2377      return true;
2378    }
2379
2380  if (dump_file && (dump_flags & TDF_DETAILS))
2381    {
2382      fprintf (dump_file, "Polymorphic call context meet:");
2383      dump (dump_file);
2384      fprintf (dump_file, "With context:                    ");
2385      ctx.dump (dump_file);
2386      if (otr_type)
2387	{
2388          fprintf (dump_file, "To be used with type:            ");
2389	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
2390          fprintf (dump_file, "\n");
2391	}
2392    }
2393
2394  if (!dynamic && ctx.dynamic)
2395    {
2396      dynamic = true;
2397      updated = true;
2398    }
2399
2400  /* Merge the outer type information.  */
2401  if (!outer_type)
2402    ;
2403  else if (!ctx.outer_type)
2404    {
2405      clear_outer_type ();
2406      updated = true;
2407    }
2408  /* If types are known to be same, merging is quite easy.  */
2409  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2410    {
2411      if (offset != ctx.offset
2412	  && TYPE_SIZE (outer_type)
2413	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2414	{
2415	  if (dump_file && (dump_flags & TDF_DETAILS))
2416	    fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
2417	  clear_outer_type ();
2418	  return true;
2419	}
2420      if (dump_file && (dump_flags & TDF_DETAILS))
2421        fprintf (dump_file, "Outer types match, merging flags\n");
2422      if (!maybe_in_construction && ctx.maybe_in_construction)
2423	{
2424	  updated = true;
2425	  maybe_in_construction = true;
2426	}
2427      if (!maybe_derived_type && ctx.maybe_derived_type)
2428	{
2429	  updated = true;
2430	  maybe_derived_type = true;
2431	}
2432      if (!dynamic && ctx.dynamic)
2433	{
2434	  updated = true;
2435	  dynamic = true;
2436	}
2437    }
2438  /* See if one type contains the other as a field (not as a base).  */
2439  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2440			    outer_type, false, false))
2441    {
2442      if (dump_file && (dump_flags & TDF_DETAILS))
2443	fprintf (dump_file, "Second type contains the first as a field\n");
2444
2445      /* The second type is more specific, so we keep the first.
2446         We need to set the DYNAMIC flag to avoid declaring the context
2447	 INVALID if OFFSET ends up being out of range.  */
2448      if (!dynamic
2449	  && (ctx.dynamic
2450	      || (!otr_type
2451		  && (!TYPE_SIZE (ctx.outer_type)
2452		      || !TYPE_SIZE (outer_type)
2453		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2454					   TYPE_SIZE (outer_type), 0)))))
2455	{
2456	  dynamic = true;
2457	  updated = true;
2458	}
2459    }
2460  else if (contains_type_p (outer_type, offset - ctx.offset,
2461			    ctx.outer_type, false, false))
2462    {
2463      if (dump_file && (dump_flags & TDF_DETAILS))
2464	fprintf (dump_file, "First type contains the second as a field\n");
2465
2466      if (!dynamic
2467	  && (ctx.dynamic
2468	      || (!otr_type
2469		  && (!TYPE_SIZE (ctx.outer_type)
2470		      || !TYPE_SIZE (outer_type)
2471		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2472					   TYPE_SIZE (outer_type), 0)))))
2473	dynamic = true;
2474      outer_type = ctx.outer_type;
2475      offset = ctx.offset;
2476      dynamic = ctx.dynamic;
2477      maybe_in_construction = ctx.maybe_in_construction;
2478      maybe_derived_type = ctx.maybe_derived_type;
2479      updated = true;
2480    }
2481  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
2482  else if (contains_type_p (ctx.outer_type,
2483			    ctx.offset - offset, outer_type, false, true))
2484    {
2485      if (dump_file && (dump_flags & TDF_DETAILS))
2486	fprintf (dump_file, "First type is base of second\n");
2487      if (!maybe_derived_type)
2488	{
2489	  maybe_derived_type = true;
2490	  updated = true;
2491	}
2492      if (!maybe_in_construction && ctx.maybe_in_construction)
2493	{
2494	  maybe_in_construction = true;
2495	  updated = true;
2496	}
2497      if (!dynamic && ctx.dynamic)
2498	{
2499	  dynamic = true;
2500	  updated = true;
2501	}
2502    }
2503  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
2504  else if (contains_type_p (outer_type,
2505			    offset - ctx.offset, ctx.outer_type, false, true))
2506    {
2507      if (dump_file && (dump_flags & TDF_DETAILS))
2508	fprintf (dump_file, "Second type is base of first\n");
2509      outer_type = ctx.outer_type;
2510      offset = ctx.offset;
2511      updated = true;
2512      if (!maybe_derived_type)
2513	maybe_derived_type = true;
2514      if (!maybe_in_construction && ctx.maybe_in_construction)
2515	maybe_in_construction = true;
2516      if (!dynamic && ctx.dynamic)
2517	dynamic = true;
2518    }
2519  /* TODO: handle merging using the type hierarchy.  */
2520  else
2521    {
2522      if (dump_file && (dump_flags & TDF_DETAILS))
2523        fprintf (dump_file, "Giving up on meet\n");
2524      clear_outer_type ();
2525      updated = true;
2526    }
2527
2528  updated |= meet_speculation_with (ctx.speculative_outer_type,
2529				    ctx.speculative_offset,
2530				    ctx.speculative_maybe_derived_type,
2531				    otr_type);
2532
2533  if (updated && dump_file && (dump_flags & TDF_DETAILS))
2534    {
2535      fprintf (dump_file, "Updated as:                      ");
2536      dump (dump_file);
2537      fprintf (dump_file, "\n");
2538    }
2539  return updated;
2540}
2541