/* Driver of optimization process
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process.

   The main purpose of this file is to act as an interface between the
   tree-based front ends and the backend.

   The front end is supposed to use the following functionality (a short
   illustrative sketch of a front end driving this interface follows this
   comment):

    - finalize_function

      This function is called once the front end has parsed the whole body
      of a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing the GCC extern inline
       function extension.)

    - varpool_finalize_decl

      This function has the same behavior as the above but is used for static
      variables.

    - add_asm_node

      Insert a new toplevel ASM statement.

    - finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      The symbol table is constructed starting from the trivially needed
      symbols finalized by the frontend.  Functions are lowered into
      GIMPLE representation and callgraph/reference lists are constructed.
      Those are used to discover other necessary functions and variables.

      At the end the bodies of unreachable functions are removed.

      The function can be called multiple times when multiple source level
      compilation units are combined.

    - compile

      This passes control to the back-end.  Optimizations are performed and
      final assembler is generated.  This is done in the following way.  Note
      that with link time optimization the process is split into three
      stages (compile time, linktime analysis and parallel linktime as
      indicated below).

      Compile time:

        1) Inter-procedural optimization.
           (ipa_passes)

           This part is further split into:

           a) early optimizations.  These are local passes executed in
              the topological order on the callgraph.

              The purpose of early optimizations is to optimize away simple
              things that may otherwise confuse IP analysis.  Very simple
              propagation across the callgraph is done, e.g. to discover
              functions without side effects, and simple inlining is
              performed.

           b) early small interprocedural passes.

              Those are interprocedural passes executed only at compilation
              time.  These include, for example, transactional memory
              lowering, unreachable code removal and other simple
              transformations.

           c) IP analysis stage.  All interprocedural passes do their
              analysis.

              Interprocedural passes differ from small interprocedural
              passes by their ability to operate across the whole program
              at linktime.  Their analysis stage is performed early to
              both reduce linking times and linktime memory usage by
              not having to represent the whole program in memory.

           d) LTO streaming.  When doing LTO, everything important gets
              streamed into the object file.

       Compile time and/or linktime analysis stage (WPA):

              At linktime units get streamed back and the symbol table is
              merged.  Function bodies are not streamed in and not
              available.
           e) IP propagation stage.  All IP passes execute their
              IP propagation.  This is done based on the earlier analysis
              without having function bodies at hand.
           f) Ltrans streaming.  When doing WHOPR LTO, the program
              is partitioned and streamed into multiple object files.

       Compile time and/or parallel linktime stage (ltrans):

              Each of the object files is streamed back and compiled
              separately.  Now the function bodies become available
              again.

         2) Virtual clone materialization
            (cgraph_materialize_clone)

            IP passes can produce copies of existing functions (such
            as versioned clones or inline clones) without actually
            manipulating their bodies by creating virtual clones in
            the callgraph.  At this time the virtual clones are
            turned into real functions.
         3) IP transformation

            All IP passes transform function bodies based on the earlier
            decisions of the IP propagation.

         4) late small IP passes

            Simple IP passes working within a single program partition.

         5) Expansion
            (expand_all_functions)

            At this stage functions that need to be output into
            assembler are identified and compiled in topological order.
         6) Output of variables and aliases
            Now it is known which variable references were not optimized
            out and thus all variables are output to the file.

            Note that with -fno-toplevel-reorder stages 5 and 6
            are combined together in cgraph_output_in_order.

   Finally there are functions to manipulate the callgraph from
   the backend.
    - cgraph_add_new_function is used to add backend-produced
      functions introduced after the unit is finalized.
      The functions are enqueued for later processing and inserted
      into the callgraph with cgraph_process_new_functions.

    - cgraph_function_versioning

      produces a copy of a function into a new one (a version)
      and applies simple transformations.  */
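
/* Illustrative sketch only; it is not part of the driver.  Assuming FNDECL
   and VARDECL are fully parsed FUNCTION_DECL and VAR_DECL trees, a front
   end typically drives the interface described above roughly like this:

     // Once a function body has been parsed and will no longer change:
     cgraph_node::finalize_function (fndecl, false);

     // Once a file-scope variable has been finalized:
     varpool_node::finalize_decl (vardecl);

     // Once the whole (source level) compilation unit has been parsed:
     symtab->finalize_compilation_unit ();

   finalize_compilation_unit () then performs the analysis described above
   and eventually hands control to the backend via compile ().  */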
159
160#include "config.h"
161#include "system.h"
162#include "coretypes.h"
163#include "tm.h"
164#include "hash-set.h"
165#include "machmode.h"
166#include "vec.h"
167#include "double-int.h"
168#include "input.h"
169#include "alias.h"
170#include "symtab.h"
171#include "wide-int.h"
172#include "inchash.h"
173#include "tree.h"
174#include "fold-const.h"
175#include "varasm.h"
176#include "stor-layout.h"
177#include "stringpool.h"
178#include "output.h"
179#include "rtl.h"
180#include "predict.h"
181#include "hard-reg-set.h"
182#include "input.h"
183#include "function.h"
184#include "basic-block.h"
185#include "tree-ssa-alias.h"
186#include "internal-fn.h"
187#include "gimple-fold.h"
188#include "gimple-expr.h"
189#include "is-a.h"
190#include "gimple.h"
191#include "gimplify.h"
192#include "gimple-iterator.h"
193#include "gimplify-me.h"
194#include "gimple-ssa.h"
195#include "tree-cfg.h"
196#include "tree-into-ssa.h"
197#include "tree-ssa.h"
198#include "tree-inline.h"
199#include "langhooks.h"
200#include "toplev.h"
201#include "flags.h"
202#include "debug.h"
203#include "target.h"
204#include "diagnostic.h"
205#include "params.h"
206#include "intl.h"
207#include "hash-map.h"
208#include "plugin-api.h"
209#include "ipa-ref.h"
210#include "cgraph.h"
211#include "alloc-pool.h"
212#include "symbol-summary.h"
213#include "ipa-prop.h"
214#include "tree-iterator.h"
215#include "tree-pass.h"
216#include "tree-dump.h"
217#include "gimple-pretty-print.h"
218#include "output.h"
219#include "coverage.h"
220#include "plugin.h"
221#include "ipa-inline.h"
222#include "ipa-utils.h"
223#include "lto-streamer.h"
224#include "except.h"
225#include "cfgloop.h"
226#include "regset.h"     /* FIXME: For reg_obstack.  */
227#include "context.h"
228#include "pass_manager.h"
229#include "tree-nested.h"
230#include "gimplify.h"
231#include "dbgcnt.h"
232#include "tree-chkp.h"
233#include "lto-section-names.h"
234#include "omp-low.h"
235#include "print-tree.h"
236
237/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
238   secondary queue used during optimization to accommodate passes that
239   may generate new functions that need to be optimized and expanded.  */
240vec<cgraph_node *> cgraph_new_nodes;
241
242static void expand_all_functions (void);
243static void mark_functions_to_output (void);
244static void handle_alias_pairs (void);
245
246/* Used for vtable lookup in thunk adjusting.  */
247static GTY (()) tree vtable_entry_type;
248
/* Determine if the symbol declaration is needed.  That is, visible to
   something outside this translation unit or to something magic in the
   system configury.  */
252bool
253symtab_node::needed_p (void)
254{
  /* Double check that no one has output the function into the assembly
     file early.  */
257  gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
258	               || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
259
260  if (!definition)
261    return false;
262
263  if (DECL_EXTERNAL (decl))
264    return false;
265
266  /* If the user told us it is used, then it must be so.  */
267  if (force_output)
268    return true;
269
270  /* ABI forced symbols are needed when they are external.  */
271  if (forced_by_abi && TREE_PUBLIC (decl))
272    return true;
273
274  /* Keep constructors, destructors and virtual functions.  */
275   if (TREE_CODE (decl) == FUNCTION_DECL
276       && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
277    return true;
278
279  /* Externally visible variables must be output.  The exception is
280     COMDAT variables that must be output only when they are needed.  */
281  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
282    return true;
283
284  return false;
285}
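
/* Illustrative examples only (hypothetical file-scope declarations in the
   unit being compiled):

     int exported = 1;                       // public, not comdat -> needed
     static void __attribute__ ((constructor))
     boot (void) {}                          // static constructor -> needed
     static int helper (void) { return 0; }  // not needed by itself; kept only
                                             // if a needed symbol refers to it
*/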
286
287/* Head and terminator of the queue of nodes to be processed while building
288   callgraph.  */
289
290static symtab_node symtab_terminator;
291static symtab_node *queued_nodes = &symtab_terminator;
292
/* Add NODE to the queue starting at QUEUED_NODES.
   The queue is linked via AUX pointers and terminated by a pointer to
   the symtab_terminator sentinel.  */
295
296static void
297enqueue_node (symtab_node *node)
298{
299  if (node->aux)
300    return;
301  gcc_checking_assert (queued_nodes);
302  node->aux = queued_nodes;
303  queued_nodes = node;
304}
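
/* The pending queue is an intrusive work list threaded through the AUX
   pointers: a node is considered queued when its aux is non-NULL, and
   symtab_terminator acts as the end sentinel.  analyze_functions below
   drains it with a loop of roughly this shape:

     while (queued_nodes != &symtab_terminator)
       {
         symtab_node *node = queued_nodes;
         queued_nodes = (symtab_node *) node->aux;
         // ... analyze NODE and enqueue everything it references ...
       }
*/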
305
/* Process CGRAPH_NEW_NODES and perform the actions necessary to add these
   functions into the callgraph so that they look like ordinary reachable
   functions inserted into the callgraph already at construction time.  */
309
310void
311symbol_table::process_new_functions (void)
312{
313  tree fndecl;
314
315  if (!cgraph_new_nodes.exists ())
316    return;
317
318  handle_alias_pairs ();
  /*  Note that this queue may grow as it is being processed, as the new
      functions may generate new ones.  */
321  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
322    {
323      cgraph_node *node = cgraph_new_nodes[i];
324      fndecl = node->decl;
325      switch (state)
326	{
327	case CONSTRUCTION:
328	  /* At construction time we just need to finalize function and move
329	     it into reachable functions list.  */
330
331	  cgraph_node::finalize_function (fndecl, false);
332	  call_cgraph_insertion_hooks (node);
333	  enqueue_node (node);
334	  break;
335
336	case IPA:
337	case IPA_SSA:
338	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */
342
343	  gimple_register_cfg_hooks ();
344	  if (!node->analyzed)
345	    node->analyze ();
346	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
347	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
348	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
349	    g->get_passes ()->execute_early_local_passes ();
350	  else if (inline_summaries != NULL)
351	    compute_inline_parameters (node, true);
352	  free_dominance_info (CDI_POST_DOMINATORS);
353	  free_dominance_info (CDI_DOMINATORS);
354	  pop_cfun ();
355	  call_cgraph_insertion_hooks (node);
356	  break;
357
358	case EXPANSION:
359	  /* Functions created during expansion shall be compiled
360	     directly.  */
361	  node->process = 0;
362	  call_cgraph_insertion_hooks (node);
363	  node->expand ();
364	  break;
365
366	default:
367	  gcc_unreachable ();
368	  break;
369	}
370    }
371
372  cgraph_new_nodes.release ();
373}
374
/* As a GCC extension we allow redefinition of the function.  The
   semantics when the two bodies differ is not well defined.
   We replace the old body with the new body so that in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function but this is difficult to do.  */
384
385void
386cgraph_node::reset (void)
387{
388  /* If process is set, then we have already begun whole-unit analysis.
389     This is *not* testing for whether we've already emitted the function.
390     That case can be sort-of legitimately seen with real function redefinition
391     errors.  I would argue that the front end should never present us with
392     such a case, but don't enforce that for now.  */
393  gcc_assert (!process);
394
395  /* Reset our data structures so we can analyze the function again.  */
396  memset (&local, 0, sizeof (local));
397  memset (&global, 0, sizeof (global));
398  memset (&rtl, 0, sizeof (rtl));
399  analyzed = false;
400  definition = false;
401  alias = false;
402  weakref = false;
403  cpp_implicit_alias = false;
404
405  remove_callees ();
406  remove_all_references ();
407}
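
/* Example of the redefinition described above (GNU C extern inline):

     extern inline int f (void) { return 1; }   // first (extern inline) body
     int f (void) { return 2; }                 // redefinition replaces it

   When the second definition is finalized, reset () is used to drop
   everything derived from the first body.  */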
408
409/* Return true when there are references to the node.  */
410
411bool
412symtab_node::referred_to_p (void)
413{
414  ipa_ref *ref = NULL;
415
416  /* See if there are any references at all.  */
417  if (iterate_referring (0, ref))
418    return true;
419  /* For functions check also calls.  */
420  cgraph_node *cn = dyn_cast <cgraph_node *> (this);
421  if (cn && cn->callers)
422    return true;
423  return false;
424}
425
426/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
427   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
428   the garbage collector run at the moment.  We would need to either create
429   a new GC context, or just not compile right now.  */
430
431void
432cgraph_node::finalize_function (tree decl, bool no_collect)
433{
434  cgraph_node *node = cgraph_node::get_create (decl);
435
436  if (node->definition)
437    {
438      /* Nested functions should only be defined once.  */
439      gcc_assert (!DECL_CONTEXT (decl)
440		  || TREE_CODE (DECL_CONTEXT (decl)) !=	FUNCTION_DECL);
441      node->reset ();
442      node->local.redefined_extern_inline = true;
443    }
444
445  /* Set definition first before calling notice_global_symbol so that
446     it is available to notice_global_symbol.  */
447  node->definition = true;
448  notice_global_symbol (decl);
449  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
450
451  /* With -fkeep-inline-functions we are keeping all inline functions except
452     for extern inline ones.  */
453  if (flag_keep_inline_functions
454      && DECL_DECLARED_INLINE_P (decl)
455      && !DECL_EXTERNAL (decl)
456      && !DECL_DISREGARD_INLINE_LIMITS (decl))
457    node->force_output = 1;
458
  /* When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
464  if ((!opt_for_fn (decl, optimize)
465       && !node->cpp_implicit_alias
466       && !DECL_DISREGARD_INLINE_LIMITS (decl)
467       && !DECL_DECLARED_INLINE_P (decl)
468       && !(DECL_CONTEXT (decl)
469	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
470      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
471    node->force_output = 1;
472
473  /* If we've not yet emitted decl, tell the debug info about it.  */
474  if (!TREE_ASM_WRITTEN (decl))
475    (*debug_hooks->deferred_inline_function) (decl);
476
477  /* Possibly warn about unused parameters.  */
478  if (warn_unused_parameter)
479    do_warn_unused_parameter (decl);
480
481  if (!no_collect)
482    ggc_collect ();
483
484  if (symtab->state == CONSTRUCTION
485      && (node->needed_p () || node->referred_to_p ()))
486    enqueue_node (node);
487}
488
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by the middle end and allows insertion of new functions at an arbitrary
   point of compilation.  The function can be either in high, low or SSA
   form GIMPLE.

   The function is assumed to be reachable and to have its address taken
   (so no API-breaking optimizations are performed on it).

   The main work done by this function is to enqueue the function for later
   processing, avoiding the need for the passes to be re-entrant.  */
500
501void
502cgraph_node::add_new_function (tree fndecl, bool lowered)
503{
504  gcc::pass_manager *passes = g->get_passes ();
505  cgraph_node *node;
506  switch (symtab->state)
507    {
508      case PARSING:
509	cgraph_node::finalize_function (fndecl, false);
510	break;
511      case CONSTRUCTION:
512	/* Just enqueue function to be processed at nearest occurrence.  */
513	node = cgraph_node::get_create (fndecl);
514	if (lowered)
515	  node->lowered = true;
516	cgraph_new_nodes.safe_push (node);
517        break;
518
519      case IPA:
520      case IPA_SSA:
521      case IPA_SSA_AFTER_INLINING:
522      case EXPANSION:
523	/* Bring the function into finalized state and enqueue for later
524	   analyzing and compilation.  */
525	node = cgraph_node::get_create (fndecl);
526	node->local.local = false;
527	node->definition = true;
528	node->force_output = true;
529	if (!lowered && symtab->state == EXPANSION)
530	  {
531	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
532	    gimple_register_cfg_hooks ();
533	    bitmap_obstack_initialize (NULL);
534	    execute_pass_list (cfun, passes->all_lowering_passes);
535	    passes->execute_early_local_passes ();
536	    bitmap_obstack_release (NULL);
537	    pop_cfun ();
538
539	    lowered = true;
540	  }
541	if (lowered)
542	  node->lowered = true;
543	cgraph_new_nodes.safe_push (node);
544        break;
545
546      case FINISHED:
547	/* At the very end of compilation we have to do all the work up
548	   to expansion.  */
549	node = cgraph_node::create (fndecl);
550	if (lowered)
551	  node->lowered = true;
552	node->definition = true;
553	node->analyze ();
554	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
555	gimple_register_cfg_hooks ();
556	bitmap_obstack_initialize (NULL);
557	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
558	  g->get_passes ()->execute_early_local_passes ();
559	bitmap_obstack_release (NULL);
560	pop_cfun ();
561	node->expand ();
562	break;
563
564      default:
565	gcc_unreachable ();
566    }
567
568  /* Set a personality if required and we already passed EH lowering.  */
569  if (lowered
570      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
571	  == eh_personality_lang))
572    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
573}
574
575/* Analyze the function scheduled to be output.  */
576void
577cgraph_node::analyze (void)
578{
579  tree decl = this->decl;
580  location_t saved_loc = input_location;
581  input_location = DECL_SOURCE_LOCATION (decl);
582
583  if (thunk.thunk_p)
584    {
585      cgraph_node *t = cgraph_node::get (thunk.alias);
586
587      create_edge (t, NULL, 0, CGRAPH_FREQ_BASE);
588      callees->can_throw_external = !TREE_NOTHROW (t->decl);
589      /* Target code in expand_thunk may need the thunk's target
590	 to be analyzed, so recurse here.  */
591      if (!t->analyzed)
592	t->analyze ();
593      if (t->alias)
594	{
595	  t = t->get_alias_target ();
596	  if (!t->analyzed)
597	    t->analyze ();
598	}
599      if (!expand_thunk (false, false))
600	{
601	  thunk.alias = NULL;
602	  return;
603	}
604      thunk.alias = NULL;
605    }
606  if (alias)
607    resolve_alias (cgraph_node::get (alias_target));
608  else if (dispatcher_function)
609    {
610      /* Generate the dispatcher body of multi-versioned functions.  */
611      cgraph_function_version_info *dispatcher_version_info
612	= function_version ();
613      if (dispatcher_version_info != NULL
614          && (dispatcher_version_info->dispatcher_resolver
615	      == NULL_TREE))
616	{
617	  tree resolver = NULL_TREE;
618	  gcc_assert (targetm.generate_version_dispatcher_body);
619	  resolver = targetm.generate_version_dispatcher_body (this);
620	  gcc_assert (resolver != NULL_TREE);
621	}
622    }
623  else
624    {
625      push_cfun (DECL_STRUCT_FUNCTION (decl));
626
627      assign_assembler_name_if_neeeded (decl);
628
629      /* Make sure to gimplify bodies only once.  During analyzing a
630	 function we lower it, which will require gimplified nested
631	 functions, so we can end up here with an already gimplified
632	 body.  */
633      if (!gimple_has_body_p (decl))
634	gimplify_function_tree (decl);
635      dump_function (TDI_generic, decl);
636
637      /* Lower the function.  */
638      if (!lowered)
639	{
640	  if (nested)
641	    lower_nested_functions (decl);
642	  gcc_assert (!nested);
643
644	  gimple_register_cfg_hooks ();
645	  bitmap_obstack_initialize (NULL);
646	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
647	  free_dominance_info (CDI_POST_DOMINATORS);
648	  free_dominance_info (CDI_DOMINATORS);
649	  compact_blocks ();
650	  bitmap_obstack_release (NULL);
651	  lowered = true;
652	}
653
654      pop_cfun ();
655    }
656  analyzed = true;
657
658  input_location = saved_loc;
659}
660
/* The C++ frontend produces same body aliases all over the place, even before
   PCH gets streamed out.  It relies on us linking the aliases with their
   function in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++ FE is
   sure it won't stream PCH we build the links via this function.  */
666
667void
668symbol_table::process_same_body_aliases (void)
669{
670  symtab_node *node;
671  FOR_EACH_SYMBOL (node)
672    if (node->cpp_implicit_alias && !node->analyzed)
673      node->resolve_alias
674	(TREE_CODE (node->alias_target) == VAR_DECL
675	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
676	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
677  cpp_implicit_aliases_done = true;
678}
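
/* Example of where such aliases typically come from (C++):

     struct S { S () {} };

   The C++ FE emits the complete-object and base-object constructor variants
   of S as separate symbols and, when they can share a body, registers one as
   a same-body alias (cpp_implicit_alias) of the other.  The ipa-ref links for
   those aliases are only built here, once PCH streaming can no longer
   happen.  */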
679
680/* Process attributes common for vars and functions.  */
681
682static void
683process_common_attributes (symtab_node *node, tree decl)
684{
685  tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
686
687  if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
688    {
689      warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
690		  "%<weakref%> attribute should be accompanied with"
691		  " an %<alias%> attribute");
692      DECL_WEAK (decl) = 0;
693      DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
694						 DECL_ATTRIBUTES (decl));
695    }
696
697  if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
698    node->no_reorder = 1;
699}
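
/* Example of the weakref handling above (C):

     static int ok (void) __attribute__ ((weakref ("real_impl")));
     static int bad (void) __attribute__ ((weakref));

   The second declaration carries no alias target, so the warning above is
   emitted and the weakref attribute (and DECL_WEAK) is dropped; the first
   one is a proper weakref to real_impl.  */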
700
701/* Look for externally_visible and used attributes and mark cgraph nodes
702   accordingly.
703
704   We cannot mark the nodes at the point the attributes are processed (in
705   handle_*_attribute) because the copy of the declarations available at that
706   point may not be canonical.  For example, in:
707
708    void f();
709    void f() __attribute__((used));
710
711   the declaration we see in handle_used_attribute will be the second
712   declaration -- but the front end will subsequently merge that declaration
713   with the original declaration and discard the second declaration.
714
715   Furthermore, we can't mark these nodes in finalize_function because:
716
717    void f() {}
718    void f() __attribute__((externally_visible));
719
720   is valid.
721
722   So, we walk the nodes at the end of the translation unit, applying the
723   attributes at that point.  */
724
725static void
726process_function_and_variable_attributes (cgraph_node *first,
727                                          varpool_node *first_var)
728{
729  cgraph_node *node;
730  varpool_node *vnode;
731
732  for (node = symtab->first_function (); node != first;
733       node = symtab->next_function (node))
734    {
735      tree decl = node->decl;
736      if (DECL_PRESERVE_P (decl))
737	node->mark_force_output ();
738      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
739	{
740	  if (! TREE_PUBLIC (node->decl))
741	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
742			"%<externally_visible%>"
743			" attribute have effect only on public objects");
744	}
745      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
746	  && (node->definition && !node->alias))
747	{
748	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
749		      "%<weakref%> attribute ignored"
750		      " because function is defined");
751	  DECL_WEAK (decl) = 0;
752	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
753						     DECL_ATTRIBUTES (decl));
754	}
755
756      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
757	  && !DECL_DECLARED_INLINE_P (decl)
758	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
759	  && !DECL_UNINLINABLE (decl))
760	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
761		    "always_inline function might not be inlinable");
762
763      process_common_attributes (node, decl);
764    }
765  for (vnode = symtab->first_variable (); vnode != first_var;
766       vnode = symtab->next_variable (vnode))
767    {
768      tree decl = vnode->decl;
769      if (DECL_EXTERNAL (decl)
770	  && DECL_INITIAL (decl))
771	varpool_node::finalize_decl (decl);
772      if (DECL_PRESERVE_P (decl))
773	vnode->force_output = true;
774      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
775	{
776	  if (! TREE_PUBLIC (vnode->decl))
777	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
778			"%<externally_visible%>"
779			" attribute have effect only on public objects");
780	}
781      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
782	  && vnode->definition
783	  && DECL_INITIAL (decl))
784	{
785	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
786		      "%<weakref%> attribute ignored"
787		      " because variable is initialized");
788	  DECL_WEAK (decl) = 0;
789	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
790						      DECL_ATTRIBUTES (decl));
791	}
792      process_common_attributes (vnode, decl);
793    }
794}
795
/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file if it is
   needed or externally visible.  */
799
800void
801varpool_node::finalize_decl (tree decl)
802{
803  varpool_node *node = varpool_node::get_create (decl);
804
805  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
806
807  if (node->definition)
808    return;
809  /* Set definition first before calling notice_global_symbol so that
810     it is available to notice_global_symbol.  */
811  node->definition = true;
812  notice_global_symbol (decl);
813  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
816      || node->no_reorder
817      || ((!flag_toplevel_reorder
818          && !DECL_COMDAT (node->decl)
819	   && !DECL_ARTIFICIAL (node->decl))))
820    node->force_output = true;
821
822  if (symtab->state == CONSTRUCTION
823      && (node->needed_p () || node->referred_to_p ()))
824    enqueue_node (node);
825  if (symtab->state >= IPA_SSA)
826    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     has finished.  */
829  if (symtab->state == FINISHED
830      || (!flag_toplevel_reorder
831	&& symtab->state == EXPANSION))
832    node->assemble_decl ();
833
834  if (DECL_INITIAL (decl))
835    chkp_register_var_initializer (decl);
836}
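
/* Illustrative examples of the force_output cases above (C):

     static volatile int io_reg;                  // TREE_THIS_VOLATILE
     static int table[4] __attribute__ ((used));  // DECL_PRESERVE_P

   Both are output even if nothing in the unit references them, whereas an
   ordinary unreferenced static variable would be eliminated.  */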
837
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */
842
843static void
844walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
845			       cgraph_edge *edge)
846{
847  unsigned int i;
848  void *cache_token;
849  bool final;
850  vec <cgraph_node *>targets
851    = possible_polymorphic_call_targets
852	(edge, &final, &cache_token);
853
854  if (!reachable_call_targets->add (cache_token))
855    {
856      if (symtab->dump_file)
857	dump_possible_polymorphic_call_targets
858	  (symtab->dump_file, edge);
859
860      for (i = 0; i < targets.length (); i++)
861	{
862	  /* Do not bother to mark virtual methods in anonymous namespace;
863	     either we will find use of virtual table defining it, or it is
864	     unused.  */
865	  if (targets[i]->definition
866	      && TREE_CODE
867		  (TREE_TYPE (targets[i]->decl))
868		   == METHOD_TYPE
869	      && !type_in_anonymous_namespace_p
870		   (method_class_type
871		     (TREE_TYPE (targets[i]->decl))))
872	  enqueue_node (targets[i]);
873	}
874    }
875
  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all of its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
880  if (final)
881    {
882      if (targets.length () <= 1 && dbg_cnt (devirt))
883	{
884	  cgraph_node *target;
885	  if (targets.length () == 1)
886	    target = targets[0];
887	  else
888	    target = cgraph_node::create
889			(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
890
891	  if (symtab->dump_file)
892	    {
893	      fprintf (symtab->dump_file,
894		       "Devirtualizing call: ");
895	      print_gimple_stmt (symtab->dump_file,
896				 edge->call_stmt, 0,
897				 TDF_SLIM);
898	    }
899          if (dump_enabled_p ())
900            {
901	      location_t locus = gimple_location_safe (edge->call_stmt);
902	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
903			       "devirtualizing call in %s to %s\n",
904			       edge->caller->name (), target->name ());
905	    }
906
907	  edge->make_direct (target);
908	  edge->redirect_call_stmt_to_callee ();
909
910	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
911	  if (!targets.length ())
912	    gimple_call_set_with_bounds (edge->call_stmt, false);
913
914	  if (symtab->dump_file)
915	    {
916	      fprintf (symtab->dump_file,
917		       "Devirtualized as: ");
918	      print_gimple_stmt (symtab->dump_file,
919				 edge->call_stmt, 0,
920				 TDF_SLIM);
921	    }
922	}
923    }
924}
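
/* Illustrative example of the trivial devirtualization above (C++):

     namespace { struct A { virtual int f () { return 1; } }; }
     int call_f (A *a) { return a->f (); }

   A lives in an anonymous namespace and nothing derives from it, so the
   list of possible call targets is known to be complete and contains the
   single entry A::f; the indirect call in call_f can therefore be made
   direct (or redirected to __builtin_unreachable if the list is empty).  */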
925
926
/* Discover all functions and variables that are trivially needed; analyze
   them as well as all functions and variables referred to by them.  */
929static cgraph_node *first_analyzed;
930static varpool_node *first_analyzed_var;
931
932static void
933analyze_functions (void)
934{
935  /* Keep track of already processed nodes when called multiple times for
936     intermodule optimization.  */
937  cgraph_node *first_handled = first_analyzed;
938  varpool_node *first_handled_var = first_analyzed_var;
939  hash_set<void *> reachable_call_targets;
940
941  symtab_node *node;
942  symtab_node *next;
943  int i;
944  ipa_ref *ref;
945  bool changed = true;
946  location_t saved_loc = input_location;
947
948  bitmap_obstack_initialize (NULL);
949  symtab->state = CONSTRUCTION;
950  input_location = UNKNOWN_LOCATION;
951
  /* Ugly, but the fixup cannot happen at the time the same body alias is
     created; the C++ FE is confused about the COMDAT groups being right.  */
954  if (symtab->cpp_implicit_aliases_done)
955    FOR_EACH_SYMBOL (node)
956      if (node->cpp_implicit_alias)
957	  node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
958  build_type_inheritance_graph ();
959
  /* Analysis adds static variables that in turn add references to new
     functions.  So we need to iterate the process until it stabilizes.  */
962  while (changed)
963    {
964      changed = false;
965      process_function_and_variable_attributes (first_analyzed,
966						first_analyzed_var);
967
968      /* First identify the trivially needed symbols.  */
969      for (node = symtab->first_symbol ();
970	   node != first_analyzed
971	   && node != first_analyzed_var; node = node->next)
972	{
973	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
974	  node->get_comdat_group_id ();
975	  if (node->needed_p ())
976	    {
977	      enqueue_node (node);
978	      if (!changed && symtab->dump_file)
979		fprintf (symtab->dump_file, "Trivially needed symbols:");
980	      changed = true;
981	      if (symtab->dump_file)
982		fprintf (symtab->dump_file, " %s", node->asm_name ());
983	      if (!changed && symtab->dump_file)
984		fprintf (symtab->dump_file, "\n");
985	    }
986	  if (node == first_analyzed
987	      || node == first_analyzed_var)
988	    break;
989	}
990      symtab->process_new_functions ();
991      first_analyzed_var = symtab->first_variable ();
992      first_analyzed = symtab->first_function ();
993
994      if (changed && symtab->dump_file)
995	fprintf (symtab->dump_file, "\n");
996
      /* Lower representation, build callgraph edges and references for all
	 trivially needed symbols and all symbols referred to by them.  */
999      while (queued_nodes != &symtab_terminator)
1000	{
1001	  changed = true;
1002	  node = queued_nodes;
1003	  queued_nodes = (symtab_node *)queued_nodes->aux;
1004	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1005	  if (cnode && cnode->definition)
1006	    {
1007	      cgraph_edge *edge;
1008	      tree decl = cnode->decl;
1009
	      /* ??? It is possible to create an extern inline function
		 and later use the weak alias attribute to kill its body.
		 See gcc.c-torture/compile/20011119-1.c  */
1013	      if (!DECL_STRUCT_FUNCTION (decl)
1014		  && !cnode->alias
1015		  && !cnode->thunk.thunk_p
1016		  && !cnode->dispatcher_function)
1017		{
1018		  cnode->reset ();
1019		  cnode->local.redefined_extern_inline = true;
1020		  continue;
1021		}
1022
1023	      if (!cnode->analyzed)
1024		cnode->analyze ();
1025
1026	      for (edge = cnode->callees; edge; edge = edge->next_callee)
1027		if (edge->callee->definition
1028		    && (!DECL_EXTERNAL (edge->callee->decl)
1029			/* When not optimizing, do not try to analyze extern
1030			   inline functions.  Doing so is pointless.  */
1031			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs need to be preserved.  */
1033			|| edge->callee->alias
			/* always_inline functions are inlined even at -O0.  */
1035		        || lookup_attribute
1036				 ("always_inline",
1037			          DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions need the dispatcher to
1039			   be produced locally even for extern functions.  */
1040			|| edge->callee->function_version ()))
1041		   enqueue_node (edge->callee);
1042	      if (opt_for_fn (cnode->decl, optimize)
1043		  && opt_for_fn (cnode->decl, flag_devirtualize))
1044		{
1045		  cgraph_edge *next;
1046
1047		  for (edge = cnode->indirect_calls; edge; edge = next)
1048		    {
1049		      next = edge->next_callee;
1050		      if (edge->indirect_info->polymorphic)
1051			walk_polymorphic_call_targets (&reachable_call_targets,
1052						       edge);
1053		    }
1054		}
1055
	      /* If decl is a clone of an abstract function,
		 mark that abstract function so that we don't release its body.
		 The DECL_INITIAL () of that abstract function declaration
		 will later be needed to output debug info.  */
1060	      if (DECL_ABSTRACT_ORIGIN (decl))
1061		{
1062		  cgraph_node *origin_node
1063		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1064		  origin_node->used_as_abstract_origin = true;
1065		}
1066	    }
1067	  else
1068	    {
1069	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
1070	      if (vnode && vnode->definition && !vnode->analyzed)
1071		vnode->analyze ();
1072	    }
1073
1074	  if (node->same_comdat_group)
1075	    {
1076	      symtab_node *next;
1077	      for (next = node->same_comdat_group;
1078		   next != node;
1079		   next = next->same_comdat_group)
1080		if (!next->comdat_local_p ())
1081		  enqueue_node (next);
1082	    }
1083	  for (i = 0; node->iterate_reference (i, ref); i++)
1084	    if (ref->referred->definition
1085		&& (!DECL_EXTERNAL (ref->referred->decl)
1086		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1087			 && optimize)
1088			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1089			    && opt_for_fn (ref->referred->decl, optimize))
1090		    || node->alias
1091		    || ref->referred->alias)))
1092	      enqueue_node (ref->referred);
1093	  symtab->process_new_functions ();
1094	}
1095    }
1096  update_type_inheritance_graph ();
1097
1098  /* Collect entry points to the unit.  */
1099  if (symtab->dump_file)
1100    {
1101      fprintf (symtab->dump_file, "\n\nInitial ");
1102      symtab_node::dump_table (symtab->dump_file);
1103    }
1104
1105  if (symtab->dump_file)
1106    fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1107
1108  for (node = symtab->first_symbol ();
1109       node != first_handled
1110       && node != first_handled_var; node = next)
1111    {
1112      next = node->next;
1113      if (!node->aux && !node->referred_to_p ())
1114	{
1115	  if (symtab->dump_file)
1116	    fprintf (symtab->dump_file, " %s", node->name ());
1117	  node->remove ();
1118	  continue;
1119	}
1120      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1121	{
1122	  tree decl = node->decl;
1123
1124	  if (cnode->definition && !gimple_has_body_p (decl)
1125	      && !cnode->alias
1126	      && !cnode->thunk.thunk_p)
1127	    cnode->reset ();
1128
1129	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1130		      || cnode->alias
1131		      || gimple_has_body_p (decl));
1132	  gcc_assert (cnode->analyzed == cnode->definition);
1133	}
1134      node->aux = NULL;
1135    }
1136  for (;node; node = node->next)
1137    node->aux = NULL;
1138  first_analyzed = symtab->first_function ();
1139  first_analyzed_var = symtab->first_variable ();
1140  if (symtab->dump_file)
1141    {
1142      fprintf (symtab->dump_file, "\n\nReclaimed ");
1143      symtab_node::dump_table (symtab->dump_file);
1144    }
1145  bitmap_obstack_release (NULL);
1146  ggc_collect ();
1147  /* Initialize assembler name hash, in particular we want to trigger C++
1148     mangling and same body alias creation before we free DECL_ARGUMENTS
1149     used by it.  */
1150  if (!seen_error ())
1151    symtab->symtab_initialize_asm_name_hash ();
1152
1153  input_location = saved_loc;
1154}
1155
/* Translate the ugly representation of aliases as alias pairs into the nice
   representation in the callgraph.  We don't handle all cases yet,
   unfortunately.  */
1159
1160static void
1161handle_alias_pairs (void)
1162{
1163  alias_pair *p;
1164  unsigned i;
1165
1166  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1167    {
1168      symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1169
      /* Weakrefs with a target not defined in the current unit are easy to
	 handle: they behave just like external variables except that we need
	 to note the alias flag to later output the weakref pseudo-op into the
	 asm file.  */
1173      if (!target_node
1174	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1175	{
1176	  symtab_node *node = symtab_node::get (p->decl);
1177	  if (node)
1178	    {
1179	      node->alias_target = p->target;
1180	      node->weakref = true;
1181	      node->alias = true;
1182	    }
1183	  alias_pairs->unordered_remove (i);
1184	  continue;
1185	}
1186      else if (!target_node)
1187	{
1188	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1189	  symtab_node *node = symtab_node::get (p->decl);
1190	  if (node)
1191	    node->alias = false;
1192	  alias_pairs->unordered_remove (i);
1193	  continue;
1194	}
1195
1196      if (DECL_EXTERNAL (target_node->decl)
1197	  /* We use local aliases for C++ thunks to force the tailcall
1198	     to bind locally.  This is a hack - to keep it working do
1199	     the following (which is not strictly correct).  */
1200	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1201	      || ! DECL_VIRTUAL_P (target_node->decl))
1202	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1203	{
1204	  error ("%q+D aliased to external symbol %qE",
1205		 p->decl, p->target);
1206	}
1207
1208      if (TREE_CODE (p->decl) == FUNCTION_DECL
1209          && target_node && is_a <cgraph_node *> (target_node))
1210	{
1211	  cgraph_node *src_node = cgraph_node::get (p->decl);
1212	  if (src_node && src_node->definition)
1213	    src_node->reset ();
1214	  cgraph_node::create_alias (p->decl, target_node->decl);
1215	  alias_pairs->unordered_remove (i);
1216	}
1217      else if (TREE_CODE (p->decl) == VAR_DECL
1218	       && target_node && is_a <varpool_node *> (target_node))
1219	{
1220	  varpool_node::create_alias (p->decl, target_node->decl);
1221	  alias_pairs->unordered_remove (i);
1222	}
1223      else
1224	{
1225	  error ("%q+D alias in between function and variable is not supported",
1226		 p->decl);
1227	  warning (0, "%q+D aliased declaration",
1228		   target_node->decl);
1229	  alias_pairs->unordered_remove (i);
1230	}
1231    }
1232  vec_free (alias_pairs);
1233}
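
/* Examples of alias pairs handled above (C):

     int impl (void) { return 0; }
     int iface (void) __attribute__ ((alias ("impl")));
     static int wref (void) __attribute__ ((weakref ("undef_target")));

   The first alias becomes a cgraph alias of impl; the weakref, whose target
   is not defined in this unit, is only marked (alias and weakref flags set)
   so that the weakref pseudo-op can be emitted later.  */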
1234
1235
1236/* Figure out what functions we want to assemble.  */
1237
1238static void
1239mark_functions_to_output (void)
1240{
1241  cgraph_node *node;
1242#ifdef ENABLE_CHECKING
1243  bool check_same_comdat_groups = false;
1244
1245  FOR_EACH_FUNCTION (node)
1246    gcc_assert (!node->process);
1247#endif
1248
1249  FOR_EACH_FUNCTION (node)
1250    {
1251      tree decl = node->decl;
1252
1253      gcc_assert (!node->process || node->same_comdat_group);
1254      if (node->process)
1255	continue;
1256
1257      /* We need to output all local functions that are used and not
1258	 always inlined, as well as those that are reachable from
1259	 outside the current compilation unit.  */
1260      if (node->analyzed
1261	  && !node->thunk.thunk_p
1262	  && !node->alias
1263	  && !node->global.inlined_to
1264	  && !TREE_ASM_WRITTEN (decl)
1265	  && !DECL_EXTERNAL (decl))
1266	{
1267	  node->process = 1;
1268	  if (node->same_comdat_group)
1269	    {
1270	      cgraph_node *next;
1271	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1272		   next != node;
1273		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1274		if (!next->thunk.thunk_p && !next->alias
1275		    && !next->comdat_local_p ())
1276		  next->process = 1;
1277	    }
1278	}
1279      else if (node->same_comdat_group)
1280	{
1281#ifdef ENABLE_CHECKING
1282	  check_same_comdat_groups = true;
1283#endif
1284	}
1285      else
1286	{
1287	  /* We should've reclaimed all functions that are not needed.  */
1288#ifdef ENABLE_CHECKING
1289	  if (!node->global.inlined_to
1290	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
1294	      && !node->in_other_partition
1295	      && !node->alias
1296	      && !node->clones
1297	      && !DECL_EXTERNAL (decl))
1298	    {
1299	      node->debug ();
1300	      internal_error ("failed to reclaim unneeded function");
1301	    }
1302#endif
1303	  gcc_assert (node->global.inlined_to
1304		      || !gimple_has_body_p (decl)
1305		      || node->in_other_partition
1306		      || node->clones
1307		      || DECL_ARTIFICIAL (decl)
1308		      || DECL_EXTERNAL (decl));
1309
1310	}
1311
1312    }
1313#ifdef ENABLE_CHECKING
1314  if (check_same_comdat_groups)
1315    FOR_EACH_FUNCTION (node)
1316      if (node->same_comdat_group && !node->process)
1317	{
1318	  tree decl = node->decl;
1319	  if (!node->global.inlined_to
1320	      && gimple_has_body_p (decl)
1321	      /* FIXME: in an ltrans unit when the offline copy is outside a
1322		 partition but inline copies are inside a partition, we can
1323		 end up not removing the body since we no longer have an
1324		 analyzed node pointing to it.  */
1325	      && !node->in_other_partition
1326	      && !node->clones
1327	      && !DECL_EXTERNAL (decl))
1328	    {
1329	      node->debug ();
1330	      internal_error ("failed to reclaim unneeded function in same "
1331			      "comdat group");
1332	    }
1333	}
1334#endif
1335}
1336
/* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a function
   in lowered GIMPLE form; IN_SSA is true if the GIMPLE is in SSA form.
   COUNT is the execution count assigned to the entry, exit and body blocks
   of the new function.

   Set current_function_decl and cfun to the newly constructed empty
   function body.  Return the basic block in the function body.  */
1342
1343basic_block
1344init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
1345{
1346  basic_block bb;
1347  edge e;
1348
1349  current_function_decl = decl;
1350  allocate_struct_function (decl, false);
1351  gimple_register_cfg_hooks ();
1352  init_empty_tree_cfg ();
1353
1354  if (in_ssa)
1355    {
1356      init_tree_ssa (cfun);
1357      init_ssa_operands (cfun);
1358      cfun->gimple_df->in_ssa_p = true;
1359      cfun->curr_properties |= PROP_ssa;
1360    }
1361
1362  DECL_INITIAL (decl) = make_node (BLOCK);
1363
1364  DECL_SAVED_TREE (decl) = error_mark_node;
1365  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1366			    | PROP_cfg | PROP_loops);
1367
1368  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1369  init_loops_structure (cfun, loops_for_fn (cfun), 1);
1370  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1371
1372  /* Create BB for body of the function and connect it properly.  */
1373  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1374  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1375  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1376  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1377  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1378  bb->count = count;
1379  bb->frequency = BB_FREQ_MAX;
1380  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1381  e->count = count;
1382  e->probability = REG_BR_PROB_BASE;
1383  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1384  e->count = count;
1385  e->probability = REG_BR_PROB_BASE;
1386  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1387
1388  return bb;
1389}
1390
1391/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1392   offset indicated by VIRTUAL_OFFSET, if that is
1393   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1394   zero for a result adjusting thunk.  */
1395
1396static tree
1397thunk_adjust (gimple_stmt_iterator * bsi,
1398	      tree ptr, bool this_adjusting,
1399	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
1400{
1401  gassign *stmt;
1402  tree ret;
1403
1404  if (this_adjusting
1405      && fixed_offset != 0)
1406    {
1407      stmt = gimple_build_assign
1408		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
1409						       ptr,
1410						       fixed_offset));
1411      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1412    }
1413
1414  /* If there's a virtual offset, look up that value in the vtable and
1415     adjust the pointer again.  */
1416  if (virtual_offset)
1417    {
1418      tree vtabletmp;
1419      tree vtabletmp2;
1420      tree vtabletmp3;
1421
1422      if (!vtable_entry_type)
1423	{
1424	  tree vfunc_type = make_node (FUNCTION_TYPE);
1425	  TREE_TYPE (vfunc_type) = integer_type_node;
1426	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1427	  layout_type (vfunc_type);
1428
1429	  vtable_entry_type = build_pointer_type (vfunc_type);
1430	}
1431
1432      vtabletmp =
1433	create_tmp_reg (build_pointer_type
1434			  (build_pointer_type (vtable_entry_type)), "vptr");
1435
1436      /* The vptr is always at offset zero in the object.  */
1437      stmt = gimple_build_assign (vtabletmp,
1438				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1439					  ptr));
1440      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1441
1442      /* Form the vtable address.  */
1443      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1444				     "vtableaddr");
1445      stmt = gimple_build_assign (vtabletmp2,
1446				  build_simple_mem_ref (vtabletmp));
1447      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1448
1449      /* Find the entry with the vcall offset.  */
1450      stmt = gimple_build_assign (vtabletmp2,
1451				  fold_build_pointer_plus_loc (input_location,
1452							       vtabletmp2,
1453							       virtual_offset));
1454      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1455
1456      /* Get the offset itself.  */
1457      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1458				     "vcalloffset");
1459      stmt = gimple_build_assign (vtabletmp3,
1460				  build_simple_mem_ref (vtabletmp2));
1461      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1462
1463      /* Adjust the `this' pointer.  */
1464      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1465      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1466				      GSI_CONTINUE_LINKING);
1467    }
1468
1469  if (!this_adjusting
1470      && fixed_offset != 0)
1471    /* Adjust the pointer by the constant.  */
1472    {
1473      tree ptrtmp;
1474
1475      if (TREE_CODE (ptr) == VAR_DECL)
1476        ptrtmp = ptr;
1477      else
1478        {
1479          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1480          stmt = gimple_build_assign (ptrtmp, ptr);
1481	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1482	}
1483      ptr = fold_build_pointer_plus_hwi_loc (input_location,
1484					     ptrtmp, fixed_offset);
1485    }
1486
1487  /* Emit the statement and gimplify the adjustment expression.  */
1488  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1489  stmt = gimple_build_assign (ret, ptr);
1490  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1491
1492  return ret;
1493}
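
/* Example of where a nonzero FIXED_OFFSET comes from (C++ multiple
   inheritance):

     struct A { virtual void f (); int a; };
     struct B { virtual void g (); int b; };
     struct C : A, B { void g (); };

   Calling g through a B * that points into a C object enters a thunk whose
   `this' must first be adjusted by the offset of the B subobject within C
   before control reaches C::g.  VIRTUAL_OFFSET is used instead when the
   adjustment has to be loaded from the vtable at run time, e.g. for virtual
   bases.  */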
1494
/* Expand thunk NODE to GIMPLE if possible.
   When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNKS is true, also produce assembler for
   thunks that are not lowered.  */
1500
1501bool
1502cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1503{
1504  bool this_adjusting = thunk.this_adjusting;
1505  HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1506  HOST_WIDE_INT virtual_value = thunk.virtual_value;
1507  tree virtual_offset = NULL;
1508  tree alias = callees->callee->decl;
1509  tree thunk_fndecl = decl;
1510  tree a;
1511
  /* An instrumentation thunk is the same function with
     a different signature.  We never need to expand it.  */
1514  if (thunk.add_pointer_bounds_args)
1515    return false;
1516
1517  if (!force_gimple_thunk && this_adjusting
1518      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1519					      virtual_value, alias))
1520    {
1521      const char *fnname;
1522      tree fn_block;
1523      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1524
1525      if (!output_asm_thunks)
1526	{
1527	  analyzed = true;
1528	  return false;
1529	}
1530
1531      if (in_lto_p)
1532	get_untransformed_body ();
1533      a = DECL_ARGUMENTS (thunk_fndecl);
1534
1535      current_function_decl = thunk_fndecl;
1536
1537      /* Ensure thunks are emitted in their correct sections.  */
1538      resolve_unique_section (thunk_fndecl, 0,
1539			      flag_function_sections);
1540
1541      DECL_RESULT (thunk_fndecl)
1542	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1543		      RESULT_DECL, 0, restype);
1544      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1545      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1546
1547      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1548	 create one.  */
1549      fn_block = make_node (BLOCK);
1550      BLOCK_VARS (fn_block) = a;
1551      DECL_INITIAL (thunk_fndecl) = fn_block;
1552      init_function_start (thunk_fndecl);
1553      cfun->is_thunk = 1;
1554      insn_locations_init ();
1555      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1556      prologue_location = curr_insn_location ();
1557      assemble_start_function (thunk_fndecl, fnname);
1558
1559      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1560				       fixed_offset, virtual_value, alias);
1561
1562      assemble_end_function (thunk_fndecl, fnname);
1563      insn_locations_finalize ();
1564      init_insn_lengths ();
1565      free_after_compilation (cfun);
1566      set_cfun (NULL);
1567      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1568      thunk.thunk_p = false;
1569      analyzed = false;
1570    }
1571  else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1572    {
1573      error ("generic thunk code fails for method %qD which uses %<...%>",
1574	     thunk_fndecl);
1575      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1576      analyzed = true;
1577      return false;
1578    }
1579  else
1580    {
1581      tree restype;
1582      basic_block bb, then_bb, else_bb, return_bb;
1583      gimple_stmt_iterator bsi;
1584      int nargs = 0;
1585      tree arg;
1586      int i;
1587      tree resdecl;
1588      tree restmp = NULL;
1589      tree resbnd = NULL;
1590
1591      gcall *call;
1592      greturn *ret;
1593      bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1594
1595      if (in_lto_p)
1596	get_untransformed_body ();
1597      a = DECL_ARGUMENTS (thunk_fndecl);
1598
1599      current_function_decl = thunk_fndecl;
1600
1601      /* Ensure thunks are emitted in their correct sections.  */
1602      resolve_unique_section (thunk_fndecl, 0,
1603			      flag_function_sections);
1604
1605      DECL_IGNORED_P (thunk_fndecl) = 1;
1606      bitmap_obstack_initialize (NULL);
1607
1608      if (thunk.virtual_offset_p)
1609        virtual_offset = size_int (virtual_value);
1610
1611      /* Build the return declaration for the function.  */
1612      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1613      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1614	{
1615	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1616	  DECL_ARTIFICIAL (resdecl) = 1;
1617	  DECL_IGNORED_P (resdecl) = 1;
1618	  DECL_RESULT (thunk_fndecl) = resdecl;
1619          DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1620	}
1621      else
1622	resdecl = DECL_RESULT (thunk_fndecl);
1623
1624      bb = then_bb = else_bb = return_bb
1625	= init_lowered_empty_function (thunk_fndecl, true, count);
1626
1627      bsi = gsi_start_bb (bb);
1628
1629      /* Build call to the function being thunked.  */
1630      if (!VOID_TYPE_P (restype) && !alias_is_noreturn)
1631	{
1632	  if (DECL_BY_REFERENCE (resdecl))
1633	    {
1634	      restmp = gimple_fold_indirect_ref (resdecl);
1635	      if (!restmp)
1636		restmp = build2 (MEM_REF,
1637				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1638				 resdecl,
1639				 build_int_cst (TREE_TYPE
1640				   (DECL_RESULT (alias)), 0));
1641	    }
1642	  else if (!is_gimple_reg_type (restype))
1643	    {
1644	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1645		{
1646		  restmp = resdecl;
1647
1648		  if (TREE_CODE (restmp) == VAR_DECL)
1649		    add_local_decl (cfun, restmp);
1650		  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1651		}
1652	      else
1653		restmp = create_tmp_var (restype, "retval");
1654	    }
1655	  else
1656	    restmp = create_tmp_reg (restype, "retval");
1657	}
1658
1659      for (arg = a; arg; arg = DECL_CHAIN (arg))
1660        nargs++;
1661      auto_vec<tree> vargs (nargs);
1662      i = 0;
1663      arg = a;
1664      if (this_adjusting)
1665	{
1666	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1667					  virtual_offset));
1668	  arg = DECL_CHAIN (a);
1669	  i = 1;
1670	}
1671
1672      if (nargs)
1673	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1674	  {
1675	    tree tmp = arg;
1676	    if (!is_gimple_val (arg))
1677	      {
1678		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1679				      (TREE_TYPE (arg)), "arg");
1680		gimple stmt = gimple_build_assign (tmp, arg);
1681		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1682	      }
1683	    vargs.quick_push (tmp);
1684	  }
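      /* All arguments (with THIS possibly adjusted above) are now collected
	 in VARGS; build the call to the target function and attach it to the
	 thunk's pre-existing call graph edge.  */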
1685      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1686      callees->call_stmt = call;
1687      gimple_call_set_from_thunk (call, true);
1688      gimple_call_set_with_bounds (call, instrumentation_clone);
1689
1690      /* Return slot optimization is always possible and in fact required to
1691         return values with DECL_BY_REFERENCE.  */
1692      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1693	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1694	      || DECL_BY_REFERENCE (resdecl)))
1695        gimple_call_set_return_slot_opt (call, true);
1696
1697      if (restmp && !alias_is_noreturn)
1698	{
1699          gimple_call_set_lhs (call, restmp);
1700	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1701						 TREE_TYPE (TREE_TYPE (alias))));
1702	}
1703      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1704      if (!alias_is_noreturn)
1705	{
1706	  if (instrumentation_clone
1707	      && !DECL_BY_REFERENCE (resdecl)
1708	      && restmp
1709	      && BOUNDED_P (restmp))
1710	    {
1711	      resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1712	      create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1713			   as_a <gcall *> (gsi_stmt (bsi)),
1714			   callees->count, callees->frequency);
1715	    }
1716
1717	  if (restmp && !this_adjusting
1718	      && (fixed_offset || virtual_offset))
1719	    {
1720	      tree true_label = NULL_TREE;
1721
1722	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1723		{
1724		  gimple stmt;
1725		  edge e;
1726		  /* If the return type is a pointer, we need to
1727		     protect against NULL.  We know there will be an
1728		     adjustment, because that's why we're emitting a
1729		     thunk.  */
1730		  then_bb = create_basic_block (NULL, (void *) 0, bb);
1731		  then_bb->count = count - count / 16;
1732		  then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1733		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1734		  return_bb->count = count;
1735		  return_bb->frequency = BB_FREQ_MAX;
1736		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1737		  else_bb->count = count / 16;
1738		  else_bb->frequency = BB_FREQ_MAX / 16;
1739		  add_bb_to_loop (then_bb, bb->loop_father);
1740		  add_bb_to_loop (return_bb, bb->loop_father);
1741		  add_bb_to_loop (else_bb, bb->loop_father);
1742		  remove_edge (single_succ_edge (bb));
1743		  true_label = gimple_block_label (then_bb);
1744		  stmt = gimple_build_cond (NE_EXPR, restmp,
1745					    build_zero_cst (TREE_TYPE (restmp)),
1746					    NULL_TREE, NULL_TREE);
1747		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1748		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1749		  e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
1750		  e->count = count - count / 16;
1751		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1752		  e->probability = REG_BR_PROB_BASE / 16;
1753		  e->count = count / 16;
1754		  e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1755		  e->probability = REG_BR_PROB_BASE;
1756		  e->count = count;
1757		  e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1758		  e->probability = REG_BR_PROB_BASE;
1759		  e->count = count - count / 16;
1760		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1761		  e->probability = REG_BR_PROB_BASE;
1762		  e->count = count / 16;
1763		  bsi = gsi_last_bb (then_bb);
1764		}
1765
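	      /* Adjust the value returned by the callee by FIXED_OFFSET and
		 VIRTUAL_OFFSET.  This non-this-adjusting case is what, for
		 example, C++ covariant return thunks require.  */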
1766	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1767				     fixed_offset, virtual_offset);
1768	      if (true_label)
1769		{
1770		  gimple stmt;
1771		  bsi = gsi_last_bb (else_bb);
1772		  stmt = gimple_build_assign (restmp,
1773					      build_zero_cst (TREE_TYPE (restmp)));
1774		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1775		  bsi = gsi_last_bb (return_bb);
1776		}
1777	    }
1778	  else
1779	    gimple_call_set_tail (call, true);
1780
1781	  /* Build return value.  */
1782	  if (!DECL_BY_REFERENCE (resdecl))
1783	    ret = gimple_build_return (restmp);
1784	  else
1785	    ret = gimple_build_return (resdecl);
1786	  gimple_return_set_retbnd (ret, resbnd);
1787
1788	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1789	}
1790      else
1791	{
1792	  gimple_call_set_tail (call, true);
1793	  remove_edge (single_succ_edge (bb));
1794	}
1795
1796      cfun->gimple_df->in_ssa_p = true;
1797      profile_status_for_fn (cfun)
1798        = count ? PROFILE_READ : PROFILE_GUESSED;
1799      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
1800      TREE_ASM_WRITTEN (thunk_fndecl) = false;
1801      delete_unreachable_blocks ();
1802      update_ssa (TODO_update_ssa);
1803#ifdef ENABLE_CHECKING
1804      verify_flow_info ();
1805#endif
1806      free_dominance_info (CDI_DOMINATORS);
1807
1808      /* Since we want to emit the thunk, we explicitly mark its name as
1809	 referenced.  */
1810      thunk.thunk_p = false;
1811      lowered = true;
1812      bitmap_obstack_release (NULL);
1813    }
1814  current_function_decl = NULL;
1815  set_cfun (NULL);
1816  return true;
1817}
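
/* For illustration only (example not present in the original sources):
   a this-adjusting thunk is typically needed for multiple inheritance, e.g.

	struct A { virtual void f (); };
	struct B { virtual void g (); };
	struct C : A, B { virtual void g (); };

   Calling C::g through a B* requires adjusting THIS by the (target-dependent)
   offset of the B subobject inside C before jumping to the real C::g.  When
   the target hook cannot output such a thunk directly, the generic code above
   builds a small GIMPLE body that adds FIXED_OFFSET (and, for virtual bases,
   a value loaded through VIRTUAL_OFFSET from the vtable) to THIS and then
   calls the target function, as a tail call when possible.  */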
1818
1819/* Assemble thunks and aliases associated with the node.  */
1820
1821void
1822cgraph_node::assemble_thunks_and_aliases (void)
1823{
1824  cgraph_edge *e;
1825  ipa_ref *ref;
1826
1827  for (e = callers; e;)
1828    if (e->caller->thunk.thunk_p
1829	&& !e->caller->thunk.add_pointer_bounds_args)
1830      {
1831	cgraph_node *thunk = e->caller;
1832
1833	e = e->next_caller;
1834	thunk->expand_thunk (true, false);
1835	thunk->assemble_thunks_and_aliases ();
1836      }
1837    else
1838      e = e->next_caller;
1839
1840  FOR_EACH_ALIAS (this, ref)
1841    {
1842      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1843      bool saved_written = TREE_ASM_WRITTEN (decl);
1844
1845      /* Force assemble_alias to really output the alias this time instead
1846	 of buffering it in the alias pairs list.  */
1847      TREE_ASM_WRITTEN (decl) = 1;
1848      do_assemble_alias (alias->decl,
1849			 DECL_ASSEMBLER_NAME (decl));
1850      alias->assemble_thunks_and_aliases ();
1851      TREE_ASM_WRITTEN (decl) = saved_written;
1852    }
1853}
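
/* For illustration only (hypothetical example, not from the original sources):
   given

	int foo (void) { return 0; }
	int bar (void) __attribute__ ((alias ("foo")));

   assemble_thunks_and_aliases called for foo's node emits the alias directive
   for bar right after foo's body and recurses, so aliases of aliases and
   thunks reaching foo through its caller edges are emitted as well.  */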
1854
1855/* Expand function specified by node.  */
1856
1857void
1858cgraph_node::expand (void)
1859{
1860  location_t saved_loc;
1861
1862  /* We ought not to compile any inline clones.  */
1863  gcc_assert (!global.inlined_to);
1864
1865  announce_function (decl);
1866  process = 0;
1867  gcc_assert (lowered);
1868  get_untransformed_body ();
1869
1870  /* Generate RTL for the body of DECL.  */
1871
1872  timevar_push (TV_REST_OF_COMPILATION);
1873
1874  gcc_assert (symtab->global_info_ready);
1875
1876  /* Initialize the default bitmap obstack.  */
1877  bitmap_obstack_initialize (NULL);
1878
1879  /* Initialize the RTL code for the function.  */
1880  current_function_decl = decl;
1881  saved_loc = input_location;
1882  input_location = DECL_SOURCE_LOCATION (decl);
1883  init_function_start (decl);
1884
1885  gimple_register_cfg_hooks ();
1886
1887  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1888
1889  execute_all_ipa_transforms ();
1890
1891  /* Perform all tree transforms and optimizations.  */
1892
1893  /* Signal the start of passes.  */
1894  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1895
1896  execute_pass_list (cfun, g->get_passes ()->all_passes);
1897
1898  /* Signal the end of passes.  */
1899  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1900
1901  bitmap_obstack_release (&reg_obstack);
1902
1903  /* Release the default bitmap obstack.  */
1904  bitmap_obstack_release (NULL);
1905
1906  /* If requested, warn about function definitions where the function will
1907     return a value (usually of some struct or union type) which itself will
1908     take up a lot of stack space.  */
1909  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1910    {
1911      tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1912
1913      if (ret_type && TYPE_SIZE_UNIT (ret_type)
1914	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1915	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1916				   larger_than_size))
1917	{
1918	  unsigned int size_as_int
1919	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1920
1921	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1922	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1923                     decl, size_as_int);
1924	  else
1925	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1926                     decl, larger_than_size);
1927	}
1928    }
1929
1930  gimple_set_body (decl, NULL);
1931  if (DECL_STRUCT_FUNCTION (decl) == 0
1932      && !cgraph_node::get (decl)->origin)
1933    {
1934      /* Stop pointing to the local nodes about to be freed.
1935	 But DECL_INITIAL must remain nonzero so we know this
1936	 was an actual function definition.
1937	 For a nested function, this is done in c_pop_function_context.
1938	 If rest_of_compilation set this to 0, leave it 0.  */
1939      if (DECL_INITIAL (decl) != 0)
1940	DECL_INITIAL (decl) = error_mark_node;
1941    }
1942
1943  input_location = saved_loc;
1944
1945  ggc_collect ();
1946  timevar_pop (TV_REST_OF_COMPILATION);
1947
1948  /* Make sure that the back end didn't give up on compiling.  */
1949  gcc_assert (TREE_ASM_WRITTEN (decl));
1950  set_cfun (NULL);
1951  current_function_decl = NULL;
1952
1953  /* It would make a lot more sense to output thunks before the function
1954     body to get more forward and fewer backward jumps.  This however would
1955     require solving a problem with comdats.  See PR48668.  Also aliases
1956     must come after the function itself to make one-pass assemblers, like
1957     the one on AIX, happy.  See PR 50689.  FIXME: Perhaps thunks should be
1958     moved before the function IFF they are not in comdat groups.  */
1959  assemble_thunks_and_aliases ();
1960  release_body ();
1961  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
1962     points to the dead function body.  */
1963  remove_callees ();
1964  remove_all_references ();
1965}
1966
1967/* Node comparer that is responsible for the order that corresponds
1968   to the time when a function was executed for the first time.  */
1969
1970static int
1971node_cmp (const void *pa, const void *pb)
1972{
1973  const cgraph_node *a = *(const cgraph_node * const *) pa;
1974  const cgraph_node *b = *(const cgraph_node * const *) pb;
1975
1976  /* Functions with a time profile must come before those without one.  */
1977  if (!a->tp_first_run || !b->tp_first_run)
1978    return a->tp_first_run - b->tp_first_run;
1979
1980  return a->tp_first_run != b->tp_first_run
1981	 ? b->tp_first_run - a->tp_first_run
1982	 : b->order - a->order;
1983}
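
/* Note that when -fprofile-reorder-functions is in effect,
   expand_all_functions below walks the qsorted array backwards, so the
   descending tp_first_run order produced here makes functions expand in
   ascending order of their first recorded run time, with functions that
   have no time profile expanded last.  */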
1984
1985/* Expand all functions that must be output.
1986
1987   Attempt to topologically sort the nodes so function is output when
1988   all called functions are already assembled to allow data to be
1989   propagated across the callgraph.  Use a stack to get smaller distance
1990   between a function and its callees (later we may choose to use a more
1991   sophisticated algorithm for function reordering; we will likely want
1992   to use subsections to make the output functions appear in top-down
1993   order).  */
1994
1995static void
1996expand_all_functions (void)
1997{
1998  cgraph_node *node;
1999  cgraph_node **order = XCNEWVEC (cgraph_node *,
2000					 symtab->cgraph_count);
2001  unsigned int expanded_func_count = 0, profiled_func_count = 0;
2002  int order_pos, new_order_pos = 0;
2003  int i;
2004
2005  order_pos = ipa_reverse_postorder (order);
2006  gcc_assert (order_pos == symtab->cgraph_count);
2007
2008  /* The garbage collector may remove inline clones we eliminate during
2009     optimization, so we must be sure not to reference them.  */
2010  for (i = 0; i < order_pos; i++)
2011    if (order[i]->process)
2012      order[new_order_pos++] = order[i];
2013
2014  if (flag_profile_reorder_functions)
2015    qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2016
2017  for (i = new_order_pos - 1; i >= 0; i--)
2018    {
2019      node = order[i];
2020
2021      if (node->process)
2022	{
2023	  expanded_func_count++;
2024	  if (node->tp_first_run)
2025	    profiled_func_count++;
2026
2027	  if (symtab->dump_file)
2028	    fprintf (symtab->dump_file,
2029		     "Time profile order in expand_all_functions:%s:%d\n",
2030		     node->asm_name (), node->tp_first_run);
2031	  node->process = 0;
2032	  node->expand ();
2033	}
2034    }
2035
2036  if (dump_file)
2037    fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2038	     main_input_filename, profiled_func_count, expanded_func_count);
2039
2040  if (symtab->dump_file && flag_profile_reorder_functions)
2041    fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2042             profiled_func_count, expanded_func_count);
2043
2044  symtab->process_new_functions ();
2045  free_gimplify_stack ();
2046
2047  free (order);
2048}
2049
2050/* This is used to sort the node types by the cgraph order number.  */
2051
2052enum cgraph_order_sort_kind
2053{
2054  ORDER_UNDEFINED = 0,
2055  ORDER_FUNCTION,
2056  ORDER_VAR,
2057  ORDER_ASM
2058};
2059
2060struct cgraph_order_sort
2061{
2062  enum cgraph_order_sort_kind kind;
2063  union
2064  {
2065    cgraph_node *f;
2066    varpool_node *v;
2067    asm_node *a;
2068  } u;
2069};
2070
2071/* Output all functions, variables, and asm statements in the order
2072   according to their order fields, which is the order in which they
2073   appeared in the file.  This implements -fno-toplevel-reorder.  In
2074   this mode we may output functions and variables which don't really
2075   need to be output.
2076   When NO_REORDER is true, only do this for symbols marked no_reorder.  */
2077
2078static void
2079output_in_order (bool no_reorder)
2080{
2081  int max;
2082  cgraph_order_sort *nodes;
2083  int i;
2084  cgraph_node *pf;
2085  varpool_node *pv;
2086  asm_node *pa;
2087  max = symtab->order;
2088  nodes = XCNEWVEC (cgraph_order_sort, max);
2089
2090  FOR_EACH_DEFINED_FUNCTION (pf)
2091    {
2092      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2093	{
2094	  if (no_reorder && !pf->no_reorder)
2095	    continue;
2096	  i = pf->order;
2097	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2098	  nodes[i].kind = ORDER_FUNCTION;
2099	  nodes[i].u.f = pf;
2100	}
2101    }
2102
2103  FOR_EACH_DEFINED_VARIABLE (pv)
2104    if (!DECL_EXTERNAL (pv->decl))
2105      {
2106	if (no_reorder && !pv->no_reorder)
2107	  continue;
2108	i = pv->order;
2109	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2110	nodes[i].kind = ORDER_VAR;
2111	nodes[i].u.v = pv;
2112      }
2113
2114  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2115    {
2116      i = pa->order;
2117      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2118      nodes[i].kind = ORDER_ASM;
2119      nodes[i].u.a = pa;
2120    }
2121
2122  /* In toplevel reorder mode we output all statics; mark them as needed.  */
2123
2124  for (i = 0; i < max; ++i)
2125    if (nodes[i].kind == ORDER_VAR)
2126      nodes[i].u.v->finalize_named_section_flags ();
2127
2128  for (i = 0; i < max; ++i)
2129    {
2130      switch (nodes[i].kind)
2131	{
2132	case ORDER_FUNCTION:
2133	  nodes[i].u.f->process = 0;
2134	  nodes[i].u.f->expand ();
2135	  break;
2136
2137	case ORDER_VAR:
2138	  nodes[i].u.v->assemble_decl ();
2139	  break;
2140
2141	case ORDER_ASM:
2142	  assemble_asm (nodes[i].u.a->asm_str);
2143	  break;
2144
2145	case ORDER_UNDEFINED:
2146	  break;
2147
2148	default:
2149	  gcc_unreachable ();
2150	}
2151    }
2152
2153  symtab->clear_asm_symbols ();
2154
2155  free (nodes);
2156}
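
/* For illustration only (hypothetical input, not from the original sources):
   with -fno-toplevel-reorder a unit such as

	int a = 1;
	asm (".section .mydata, \"aw\"");
	int b = 2;

   must be emitted in source order, because moving a or b across the toplevel
   asm would place one of them in the wrong section.  output_in_order above
   achieves this by sorting all symbols by their order fields.  */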
2157
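/* Run the interprocedural passes: the early and small IPA passes first (when
   not reading LTO bytecode), then, depending on the LTO configuration, stream
   out LTO/offload summaries and/or execute the regular IPA passes.  */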
2158static void
2159ipa_passes (void)
2160{
2161  gcc::pass_manager *passes = g->get_passes ();
2162
2163  set_cfun (NULL);
2164  current_function_decl = NULL;
2165  gimple_register_cfg_hooks ();
2166  bitmap_obstack_initialize (NULL);
2167
2168  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2169
2170  if (!in_lto_p)
2171    {
2172      execute_ipa_pass_list (passes->all_small_ipa_passes);
2173      if (seen_error ())
2174	return;
2175    }
2176
2177  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2178     devirtualization and other changes where node removal must iterate.  */
2179  symtab->remove_unreachable_nodes (symtab->dump_file);
2180
2181  /* If pass_all_early_optimizations was not scheduled, the state of
2182     the cgraph will not be properly updated.  Update it now.  */
2183  if (symtab->state < IPA_SSA)
2184    symtab->state = IPA_SSA;
2185
2186  if (!in_lto_p)
2187    {
2188      /* Generate coverage variables and constructors.  */
2189      coverage_finish ();
2190
2191      /* Process new functions added.  */
2192      set_cfun (NULL);
2193      current_function_decl = NULL;
2194      symtab->process_new_functions ();
2195
2196      execute_ipa_summary_passes
2197	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2198    }
2199
2200  /* Some targets need to handle LTO assembler output specially.  */
2201  if (flag_generate_lto || flag_generate_offload)
2202    targetm.asm_out.lto_start ();
2203
2204  if (!in_lto_p)
2205    {
2206      if (g->have_offload)
2207	{
2208	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2209	  lto_stream_offload_p = true;
2210	  ipa_write_summaries ();
2211	  lto_stream_offload_p = false;
2212	}
2213      if (flag_lto)
2214	{
2215	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
2216	  lto_stream_offload_p = false;
2217	  ipa_write_summaries ();
2218	}
2219    }
2220
2221  if (flag_generate_lto || flag_generate_offload)
2222    targetm.asm_out.lto_end ();
2223
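  /* Execute the regular IPA passes now unless this is a compile-time stage
     that only streams slim LTO (-flto without -ffat-lto-objects) or an ltrans
     unit, where the IPA decisions were already made at link (WPA) time.  */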
2224  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2225    execute_ipa_pass_list (passes->all_regular_ipa_passes);
2226  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2227
2228  bitmap_obstack_release (NULL);
2229}
2230
2231
2232/* Return the identifier naming the symbol that DECL is an alias of.  */
2233
2234static tree
2235get_alias_symbol (tree decl)
2236{
2237  tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2238  return get_identifier (TREE_STRING_POINTER
2239			  (TREE_VALUE (TREE_VALUE (alias))));
2240}
2241
2242
2243/* Weakrefs may be associated with external decls and thus not output
2244   at expansion time.  Emit all necessary aliases.  */
2245
2246void
2247symbol_table::output_weakrefs (void)
2248{
2249  symtab_node *node;
2250  cgraph_node *cnode;
2251  FOR_EACH_SYMBOL (node)
2252    if (node->alias
2253        && !TREE_ASM_WRITTEN (node->decl)
2254	&& (!(cnode = dyn_cast <cgraph_node *> (node))
2255	    || !cnode->instrumented_version
2256	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2257	&& node->weakref)
2258      {
2259	tree target;
2260
2261	/* Weakrefs are special in that they do not require a definition of
2262	   the target in the current compilation unit, so it is a bit harder
2263	   to work out what we want to alias.
2264	   When the alias target is defined, we fetch it from the symtab
2265	   reference; otherwise it is pointed to by alias_target.  */
2266	if (node->alias_target)
2267	  target = (DECL_P (node->alias_target)
2268		    ? DECL_ASSEMBLER_NAME (node->alias_target)
2269		    : node->alias_target);
2270	else if (node->analyzed)
2271	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2272	else
2273	  {
2274	    gcc_unreachable ();
2275	    target = get_alias_symbol (node->decl);
2276	  }
2277        do_assemble_alias (node->decl, target);
2278      }
2279}
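
/* For illustration only (hypothetical example, not from the original sources):
   a weakref such as

	static int local_foo (void) __attribute__ ((weakref ("foo")));

   references "foo" without requiring a definition in this translation unit,
   so the alias may not be output at expansion time and is instead emitted by
   output_weakrefs above.  */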
2280
2281/* Perform interprocedural optimizations and output the compilation unit.  */
2282
2283void
2284symbol_table::compile (void)
2285{
2286  if (seen_error ())
2287    return;
2288
2289#ifdef ENABLE_CHECKING
2290  symtab_node::verify_symtab_nodes ();
2291#endif
2292
2293  timevar_push (TV_CGRAPHOPT);
2294  if (pre_ipa_mem_report)
2295    {
2296      fprintf (stderr, "Memory consumption before IPA\n");
2297      dump_memory_report (false);
2298    }
2299  if (!quiet_flag)
2300    fprintf (stderr, "Performing interprocedural optimizations\n");
2301  state = IPA;
2302
2303  /* Offloading requires LTO infrastructure.  */
2304  if (!in_lto_p && g->have_offload)
2305    flag_generate_offload = 1;
2306
2307  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
2308  if (flag_generate_lto || flag_generate_offload)
2309    lto_streamer_hooks_init ();
2310
2311  /* Don't run the IPA passes if there were any errors or sorry messages.  */
2312  if (!seen_error ())
2313    ipa_passes ();
2314
2315  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
2316  if (seen_error ()
2317      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2318    {
2319      timevar_pop (TV_CGRAPHOPT);
2320      return;
2321    }
2322
2323  global_info_ready = true;
2324  if (dump_file)
2325    {
2326      fprintf (dump_file, "Optimized ");
2327      symtab_node::dump_table (dump_file);
2328    }
2329  if (post_ipa_mem_report)
2330    {
2331      fprintf (stderr, "Memory consumption after IPA\n");
2332      dump_memory_report (false);
2333    }
2334  timevar_pop (TV_CGRAPHOPT);
2335
2336  /* Output everything.  */
2337  (*debug_hooks->assembly_start) ();
2338  if (!quiet_flag)
2339    fprintf (stderr, "Assembling functions:\n");
2340#ifdef ENABLE_CHECKING
2341  symtab_node::verify_symtab_nodes ();
2342#endif
2343
2344  materialize_all_clones ();
2345  bitmap_obstack_initialize (NULL);
2346  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2347  bitmap_obstack_release (NULL);
2348  mark_functions_to_output ();
2349
2350  /* When weakref support is missing, we automatically translate all
2351     references to NODE to references to its ultimate alias target.
2352     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2353     TREE_CHAIN.
2354
2355     Set up this mapping before we output any assembler but once we are sure
2356     that all symbol renaming is done.
2357
2358     FIXME: All this ugliness can go away if we just do renaming at gimple
2359     level by physically rewriting the IL.  At the moment we can only redirect
2360     calls, so we need infrastructure for renaming references as well.  */
2361#ifndef ASM_OUTPUT_WEAKREF
2362  symtab_node *node;
2363
2364  FOR_EACH_SYMBOL (node)
2365    if (node->alias
2366	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2367      {
2368	IDENTIFIER_TRANSPARENT_ALIAS
2369	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2370	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2371	   = (node->alias_target ? node->alias_target
2372	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2373      }
2374#endif
2375
2376  state = EXPANSION;
2377
2378  if (!flag_toplevel_reorder)
2379    output_in_order (false);
2380  else
2381    {
2382      /* First output the asm statements and anything else that is ordered.
2383         Their process flag is cleared, so we skip them later.  */
2384      output_in_order (true);
2385      expand_all_functions ();
2386      output_variables ();
2387    }
2388
2389  process_new_functions ();
2390  state = FINISHED;
2391  output_weakrefs ();
2392
2393  if (dump_file)
2394    {
2395      fprintf (dump_file, "\nFinal ");
2396      symtab_node::dump_table (dump_file);
2397    }
2398#ifdef ENABLE_CHECKING
2399  symtab_node::verify_symtab_nodes ();
2400  /* Double check that all inline clones are gone and that all
2401     function bodies have been released from memory.  */
2402  if (!seen_error ())
2403    {
2404      cgraph_node *node;
2405      bool error_found = false;
2406
2407      FOR_EACH_DEFINED_FUNCTION (node)
2408	if (node->global.inlined_to
2409	    || gimple_has_body_p (node->decl))
2410	  {
2411	    error_found = true;
2412	    node->debug ();
2413	  }
2414      if (error_found)
2415	internal_error ("nodes with unreleased memory found");
2416    }
2417#endif
2418}
2419
2420
2421/* Analyze the whole compilation unit once it is parsed completely.  */
2422
2423void
2424symbol_table::finalize_compilation_unit (void)
2425{
2426  timevar_push (TV_CGRAPH);
2427
2428  /* If we're here there's no current function anymore.  Some frontends
2429     are lazy in clearing these.  */
2430  current_function_decl = NULL;
2431  set_cfun (NULL);
2432
2433  /* Do not skip analyzing the functions if there were errors; we would
2434     miss diagnostics for the following functions otherwise.  */
2435
2436  /* Emit size functions we didn't inline.  */
2437  finalize_size_functions ();
2438
2439  /* Mark alias targets necessary and emit diagnostics.  */
2440  handle_alias_pairs ();
2441
2442  if (!quiet_flag)
2443    {
2444      fprintf (stderr, "\nAnalyzing compilation unit\n");
2445      fflush (stderr);
2446    }
2447
2448  if (flag_dump_passes)
2449    dump_passes ();
2450
2451  /* Gimplify and lower all functions, compute reachability and
2452     remove unreachable nodes.  */
2453  analyze_functions ();
2454
2455  /* Mark alias targets necessary and emit diagnostics.  */
2456  handle_alias_pairs ();
2457
2458  /* Gimplify and lower thunks.  */
2459  analyze_functions ();
2460
2461  /* Finally drive the pass manager.  */
2462  compile ();
2463
2464  timevar_pop (TV_CGRAPH);
2465}
2466
2467/* Reset all state within cgraphunit.c so that we can rerun the compiler
2468   within the same process.  For use by toplev::finalize.  */
2469
2470void
2471cgraphunit_c_finalize (void)
2472{
2473  gcc_assert (cgraph_new_nodes.length () == 0);
2474  cgraph_new_nodes.truncate (0);
2475
2476  vtable_entry_type = NULL;
2477  queued_nodes = &symtab_terminator;
2478
2479  first_analyzed = NULL;
2480  first_analyzed_var = NULL;
2481}
2482
2483/* Create a wrapper from this cgraph_node to the TARGET node.  A thunk is
2484   used to implement this kind of wrapper method.  */
2485
2486void
2487cgraph_node::create_wrapper (cgraph_node *target)
2488{
2489  /* Preserve DECL_RESULT so we get the right by-reference flag.  */
2490  tree decl_result = DECL_RESULT (decl);
2491
2492  /* Remove the function's body but keep its arguments to be reused
2493     for the thunk.  */
2494  release_body (true);
2495  reset ();
2496
2497  DECL_UNINLINABLE (decl) = false;
2498  DECL_RESULT (decl) = decl_result;
2499  DECL_INITIAL (decl) = NULL;
2500  allocate_struct_function (decl, false);
2501  set_cfun (NULL);
2502
2503  /* Turn alias into thunk and expand it into GIMPLE representation.  */
2504  definition = true;
2505
2506  memset (&thunk, 0, sizeof (cgraph_thunk_info));
2507  thunk.thunk_p = true;
2508  create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
2509  callees->can_throw_external = !TREE_NOTHROW (target->decl);
2510
2511  tree arguments = DECL_ARGUMENTS (decl);
2512
2513  while (arguments)
2514    {
2515      TREE_ADDRESSABLE (arguments) = false;
2516      arguments = TREE_CHAIN (arguments);
2517    }
2518
2519  expand_thunk (false, true);
2520
2521  /* Inline summary set-up.  */
2522  analyze ();
2523  inline_analyze_function (this);
2524}
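
/* Note: create_wrapper is used, for example, by the identical code folding
   pass (ipa-icf) when two functions with the same body cannot be turned into
   an alias of each other; the duplicate is then reduced to a small thunk that
   forwards its arguments to the surviving copy.  */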
2525
2526#include "gt-cgraphunit.h"
2527