1/* Basic IPA optimizations and utilities.
2   Copyright (C) 2003-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24#include "hash-set.h"
25#include "machmode.h"
26#include "vec.h"
27#include "double-int.h"
28#include "input.h"
29#include "alias.h"
30#include "symtab.h"
31#include "options.h"
32#include "wide-int.h"
33#include "inchash.h"
34#include "tree.h"
35#include "fold-const.h"
36#include "calls.h"
37#include "stringpool.h"
38#include "predict.h"
39#include "basic-block.h"
40#include "hash-map.h"
41#include "is-a.h"
42#include "plugin-api.h"
43#include "hard-reg-set.h"
44#include "input.h"
45#include "function.h"
46#include "ipa-ref.h"
47#include "cgraph.h"
48#include "tree-pass.h"
49#include "gimple-expr.h"
50#include "gimplify.h"
51#include "flags.h"
52#include "target.h"
53#include "tree-iterator.h"
54#include "ipa-utils.h"
55#include "alloc-pool.h"
56#include "symbol-summary.h"
57#include "ipa-prop.h"
58#include "ipa-inline.h"
59#include "tree-inline.h"
60#include "profile.h"
61#include "params.h"
62#include "internal-fn.h"
63#include "tree-ssa-alias.h"
64#include "gimple.h"
65#include "dbgcnt.h"
66
67
68/* Return true when NODE has ADDR reference.  */
69
70static bool
71has_addr_references_p (struct cgraph_node *node,
72		       void *data ATTRIBUTE_UNUSED)
73{
74  int i;
75  struct ipa_ref *ref = NULL;
76
77  for (i = 0; node->iterate_referring (i, ref); i++)
78    if (ref->use == IPA_REF_ADDR)
79      return true;
80  return false;
81}
82
83/* Look for all functions inlined to NODE and update their inlined_to pointers
84   to INLINED_TO.  */
85
86static void
87update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
88{
89  struct cgraph_edge *e;
90  for (e = node->callees; e; e = e->next_callee)
91    if (e->callee->global.inlined_to)
92      {
93        e->callee->global.inlined_to = inlined_to;
94	update_inlined_to_pointer (e->callee, inlined_to);
95      }
96}
97
98/* Add symtab NODE to queue starting at FIRST.
99
100   The queue is linked via AUX pointers and terminated by pointer to 1.
101   We enqueue nodes at two occasions: when we find them reachable or when we find
102   their bodies needed for further clonning.  In the second case we mark them
103   by pointer to 2 after processing so they are re-queue when they become
104   reachable.  */
105
106static void
107enqueue_node (symtab_node *node, symtab_node **first,
108	      hash_set<symtab_node *> *reachable)
109{
110  /* Node is still in queue; do nothing.  */
111  if (node->aux && node->aux != (void *) 2)
112    return;
113  /* Node was already processed as unreachable, re-enqueue
114     only if it became reachable now.  */
115  if (node->aux == (void *)2 && !reachable->contains (node))
116    return;
117  node->aux = *first;
118  *first = node;
119}
120
/* Walk all references of SNODE, mark referred symbols reachable when
   appropriate, and enqueue them on the work list *FIRST for processing.
   BEFORE_INLINING_P is true when we are still before the inlining phase
   (bodies of extern inline / always_inline functions must be kept alive).
   REACHABLE is the set of symbols known to be reachable.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      /* For aliases, decisions below are made on the ultimate target.  */
      symtab_node *body = node->ultimate_alias_target ();

      /* A referred symbol is marked reachable when it is defined in this
	 partition and either is non-external (or an alias), or its body
	 may still be needed for inlining/folding as tested below.  */
      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && ((TREE_CODE (node->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
		        || (symtab->state < IPA_SSA
		            && lookup_attribute
				 ("always_inline",
			          DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
		         != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      /* Even when not reachable, keep the symbol in the boundary.  */
      enqueue_node (node, first, reachable);
    }
}
166
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  REACHABLE_CALL_TARGETS caches the polymorphic-context tokens
   already analyzed, so each target set is walked at most once.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* add returns true when the token was already present; in that case the
     targets were processed by an earlier call with the same context.  */
  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		    (method_class_type (TREE_TYPE (n->decl))))
	    continue;

	   symtab_node *body = n->function_symbol ();

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	   if (n->definition
	       && (before_inlining_p
		   && opt_for_fn (body->decl, optimize)
		   && opt_for_fn (body->decl, flag_devirtualize)))
	      {
		 /* Be sure that we will not optimize out alias target
		    body.  */
		 if (DECL_EXTERNAL (n->decl)
		     && n->alias
		     && before_inlining_p)
		   reachable->add (body);
		reachable->add (n);
	      }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No possible target at all: redirect to __builtin_unreachable
	       so the dead call can be eliminated.  */
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
            {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
	    }
	  edge = edge->make_direct (target);
	  if (inline_summaries)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}
269
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call. In theory early opts and IPA should work out all important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations whose body needs to stay in
     body_needed_for_clonning

     Inline clones represent special case: their declaration match the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that need
   to stay) and symbols that are in boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols. For symbols in boundary we always
   turn definition into a declaration, but we may keep function body around
   based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol. enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* The AUX pointers must be free for use as queue links.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
     }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *)2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
	          origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
 	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      /* walk_polymorphic_call_targets may devirtualize E
			 and thus remove it from the list; fetch the
			 successor first.  */
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
	          symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
		                  || (symtab->state < IPA_SSA
		                      && lookup_attribute
				          ("always_inline",
				           DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
	        body_needed_for_clonning.add (cnode->decl);

	      /* For instrumentation clones we always need original
		 function node for proper LTO privatization.  */
	      if (cnode->instrumentation_clone
		  && cnode->definition)
		{
		  gcc_assert (cnode->instrumented_version || in_lto_p);
		  if (cnode->instrumented_version)
		    {
		      enqueue_node (cnode->instrumented_version, &first,
				    &reachable);
		      reachable.add (cnode->instrumented_version);
		    }
		}

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  /* Thunks always reach their single callee.  */
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	boundary.  This will also hold initializers of the external vars NODE
	refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.   */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  /* The !alias && !thunk_p re-check below is redundant: this branch
	     is only reached when the condition above was false.  */
	  else if (!body_needed_for_clonning.contains (node->decl)
	      && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	      /* NOTE(review): thunk.thunk_p was unconditionally cleared a few
		 lines above, so this condition is always false and the block
		 below is dead code.  */
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s/%i", ref->referred->name (),
			 ref->referred->order);
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  /* Re-fetch the successor: removing the aliases above may have
	     deleted the node VNEXT pointed to.  */
          vnext = next_variable (vnode);
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	    (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ())
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
711
712/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
713   as needed, also clear EXPLICIT_REFS if the references to given variable
714   do not need to be explicit.  */
715
716void
717process_references (varpool_node *vnode,
718		    bool *written, bool *address_taken,
719		    bool *read, bool *explicit_refs)
720{
721  int i;
722  struct ipa_ref *ref;
723
724  if (!vnode->all_refs_explicit_p ()
725      || TREE_THIS_VOLATILE (vnode->decl))
726    *explicit_refs = false;
727
728  for (i = 0; vnode->iterate_referring (i, ref)
729	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
730    switch (ref->use)
731      {
732      case IPA_REF_ADDR:
733	*address_taken = true;
734	break;
735      case IPA_REF_LOAD:
736	*read = true;
737	break;
738      case IPA_REF_STORE:
739	*written = true;
740	break;
741      case IPA_REF_ALIAS:
742	process_references (dyn_cast<varpool_node *> (ref->referring), written,
743			    address_taken, read, explicit_refs);
744	break;
745      case IPA_REF_CHKP:
746	gcc_unreachable ();
747      }
748}
749
750/* Set TREE_READONLY bit.  */
751
752bool
753set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
754{
755  TREE_READONLY (vnode->decl) = true;
756  return false;
757}
758
759/* Set writeonly bit and clear the initalizer, since it will not be needed.  */
760
761bool
762set_writeonly_bit (varpool_node *vnode, void *data)
763{
764  vnode->writeonly = true;
765  if (optimize)
766    {
767      DECL_INITIAL (vnode->decl) = NULL;
768      if (!vnode->alias)
769	{
770	  if (vnode->num_references ())
771	    *(bool *)data = true;
772	  vnode->remove_all_references ();
773	}
774    }
775  return false;
776}
777
778/* Clear addressale bit of VNODE.  */
779
780bool
781clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
782{
783  vnode->address_taken = false;
784  TREE_ADDRESSABLE (vnode->decl) = 0;
785  return false;
786}
787
788/* Discover variables that have no longer address taken or that are read only
789   and update their flags.
790
791   Return true when unreachable symbol removan should be done.
792
793   FIXME: This can not be done in between gimplify and omp_expand since
794   readonly flag plays role on what is shared and what is not.  Currently we do
795   this transformation as part of whole program visibility and re-do at
796   ipa-reference pass (to take into account clonning), but it would
797   make sense to do it before early optimizations.  */
798
799bool
800ipa_discover_readonly_nonaddressable_vars (void)
801{
802  bool remove_p = false;
803  varpool_node *vnode;
804  if (dump_file)
805    fprintf (dump_file, "Clearing variable flags:");
806  FOR_EACH_VARIABLE (vnode)
807    if (!vnode->alias
808	&& (TREE_ADDRESSABLE (vnode->decl)
809	    || !vnode->writeonly
810	    || !TREE_READONLY (vnode->decl)))
811      {
812	bool written = false;
813	bool address_taken = false;
814	bool read = false;
815	bool explicit_refs = true;
816
817	process_references (vnode, &written, &address_taken, &read,
818			    &explicit_refs);
819	if (!explicit_refs)
820	  continue;
821	if (!address_taken)
822	  {
823	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
824	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
825	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
826					        true);
827	  }
828	if (!address_taken && !written
829	    /* Making variable in explicit section readonly can cause section
830	       type conflict.
831	       See e.g. gcc.c-torture/compile/pr23237.c */
832	    && vnode->get_section () == NULL)
833	  {
834	    if (!TREE_READONLY (vnode->decl) && dump_file)
835	      fprintf (dump_file, " %s (read-only)", vnode->name ());
836	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
837	  }
838	if (!vnode->writeonly && !read && !address_taken && written)
839	  {
840	    if (dump_file)
841	      fprintf (dump_file, " %s (write-only)", vnode->name ());
842	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
843					        true);
844	  }
845      }
846  if (dump_file)
847    fprintf (dump_file, "\n");
848  return remove_p;
849}
850
/* Free inline summary.  A trivial simple-IPA pass that releases the inline
   summaries once they are no longer needed.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make function unreachable.  We can not
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

/* Factory function for the pass, called from the pass manager.  */

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}
894
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced. */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  /* Counter making each generated function name unique within this
     translation unit.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
  /* Produce a sane name but one not recognizable by collect2, just for the
     case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a FUNCTION_DECL of type void(void) holding BODY.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without native ctor/dtor support collect2 must see the symbol, so it
     has to be public and protected from removal.  */
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl per WHICH and record its init/fini priority.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  /* Reset the per-function state we set up above.  */
  set_cfun (NULL);
  current_function_decl = NULL;
}
988
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   The generated function does not get a collect2-recognizable name;
   see the FINAL parameter of cgraph_build_static_cdtor_1.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
1001
/* A vector of FUNCTION_DECLs declared as static constructors,
   collected by record_cdtor_fn.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors,
   collected by record_cdtor_fn.  */
static vec<tree> static_dtors;
1006
1007/* When target does not have ctors and dtors, we call all constructor
1008   and destructor by special initialization/destruction function
1009   recognized by collect2.
1010
1011   When we are going to build this function, collect all constructors and
1012   destructors and turn them into normal functions.  */
1013
1014static void
1015record_cdtor_fn (struct cgraph_node *node)
1016{
1017  if (DECL_STATIC_CONSTRUCTOR (node->decl))
1018    static_ctors.safe_push (node->decl);
1019  if (DECL_STATIC_DESTRUCTOR (node->decl))
1020    static_dtors.safe_push (node->decl);
1021  node = cgraph_node::get (node->decl);
1022  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
1023}
1024
/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Find the batch [i, j) of constructors/destructors that share the
	 same initialization priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Build a body calling each cdtor of the batch in order, demoting
	 them to ordinary functions as we go.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
1089
1090/* Comparison function for qsort.  P1 and P2 are actually of type
1091   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
1092   used to determine the sort order.  */
1093
1094static int
1095compare_ctor (const void *p1, const void *p2)
1096{
1097  tree f1;
1098  tree f2;
1099  int priority1;
1100  int priority2;
1101
1102  f1 = *(const tree *)p1;
1103  f2 = *(const tree *)p2;
1104  priority1 = DECL_INIT_PRIORITY (f1);
1105  priority2 = DECL_INIT_PRIORITY (f2);
1106
1107  if (priority1 < priority2)
1108    return -1;
1109  else if (priority1 > priority2)
1110    return 1;
1111  else
1112    /* Ensure a stable sort.  Constructors are executed in backwarding
1113       order to make LTO initialize braries first.  */
1114    return DECL_UID (f2) - DECL_UID (f1);
1115}
1116
1117/* Comparison function for qsort.  P1 and P2 are actually of type
1118   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
1119   used to determine the sort order.  */
1120
1121static int
1122compare_dtor (const void *p1, const void *p2)
1123{
1124  tree f1;
1125  tree f2;
1126  int priority1;
1127  int priority2;
1128
1129  f1 = *(const tree *)p1;
1130  f2 = *(const tree *)p2;
1131  priority1 = DECL_FINI_PRIORITY (f1);
1132  priority2 = DECL_FINI_PRIORITY (f2);
1133
1134  if (priority1 < priority2)
1135    return -1;
1136  else if (priority1 > priority2)
1137    return 1;
1138  else
1139    /* Ensure a stable sort.  */
1140    return DECL_UID (f1) - DECL_UID (f2);
1141}
1142
1143/* Generate functions to call static constructors and destructors
1144   for targets that do not support .ctors/.dtors sections.  These
1145   functions have magic names which are detected by collect2.  */
1146
1147static void
1148build_cdtor_fns (void)
1149{
1150  if (!static_ctors.is_empty ())
1151    {
1152      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1153      static_ctors.qsort (compare_ctor);
1154      build_cdtor (/*ctor_p=*/true, static_ctors);
1155    }
1156
1157  if (!static_dtors.is_empty ())
1158    {
1159      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1160      static_dtors.qsort (compare_dtor);
1161      build_cdtor (/*ctor_p=*/false, static_dtors);
1162    }
1163}
1164
1165/* Look for constructors and destructors and produce function calling them.
1166   This is needed for targets not supporting ctors or dtors, but we perform the
1167   transformation also at linktime to merge possibly numerous
1168   constructors/destructors into single function to improve code locality and
1169   reduce size.  */
1170
1171static unsigned int
1172ipa_cdtor_merge (void)
1173{
1174  struct cgraph_node *node;
1175  FOR_EACH_DEFINED_FUNCTION (node)
1176    if (DECL_STATIC_CONSTRUCTOR (node->decl)
1177	|| DECL_STATIC_DESTRUCTOR (node->decl))
1178       record_cdtor_fn (node);
1179  build_cdtor_fns ();
1180  static_ctors.release ();
1181  static_dtors.release ();
1182  return 0;
1183}
1184
1185namespace {
1186
/* Pass descriptor for the cdtor merging pass below.  */

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1199
/* IPA pass merging static constructors/destructors; the work is done by
   ipa_cdtor_merge.  All IPA summary and transform hooks are unused.  */

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge
1221
1222bool
1223pass_ipa_cdtor_merge::gate (function *)
1224{
1225  /* Perform the pass when we have no ctors/dtors support
1226     or at LTO time to merge multiple constructors into single
1227     function.  */
1228  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1229}
1230
1231} // anon namespace
1232
/* Factory for the cdtor merging pass; used by the pass manager.  */

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
1238
1239/* Invalid pointer representing BOTTOM for single user dataflow.  */
1240#define BOTTOM ((cgraph_node *)(size_t) 2)
1241
1242/* Meet operation for single user dataflow.
1243   Here we want to associate variables with sigle function that may access it.
1244
1245   FUNCTION is current single user of a variable, VAR is variable that uses it.
1246   Latttice is stored in SINGLE_USER_MAP.
1247
1248   We represent:
1249    - TOP by no entry in SIGNLE_USER_MAP
1250    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1251    - known single user by cgraph pointer in SINGLE_USER_MAP.  */
1252
1253cgraph_node *
1254meet (cgraph_node *function, varpool_node *var,
1255       hash_map<varpool_node *, cgraph_node *> &single_user_map)
1256{
1257  struct cgraph_node *user, **f;
1258
1259  if (var->aux == BOTTOM)
1260    return BOTTOM;
1261
1262  f = single_user_map.get (var);
1263  if (!f)
1264    return function;
1265  user = *f;
1266  if (!function)
1267    return user;
1268  else if (function != user)
1269    return BOTTOM;
1270  else
1271    return function;
1272}
1273
/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by single function FUNCTION.
   SINGLE_USER_MAP represents the dataflow lattice.  Returns the updated
   lattice value for VNODE (a function, NULL for TOP, or BOTTOM).  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.
     Stop early once the lattice has dropped to BOTTOM.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  /* A reference from an inline clone counts as a reference from
	     the function it was inlined into.  */
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	/* A reference from another variable: meet with its single user.  */
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}
1311
/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  /* Worklist of variables, chained through the AUX pointer; the value 1
     is the end-of-list sentinel (it cannot collide with a real node).  */
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
        var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      /* Pop the next variable off the worklist.  */
      var = first;
      first = (varpool_node *)first->aux;

      /* ORIG_USER is VAR's current lattice value; missing entry = TOP.  */
      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
	        && ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  /* Set the flag on every variable whose lattice value is a single
     function, and clear the AUX scratch pointers.  */
  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
#ifdef ENABLE_CHECKING
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

          gcc_assert (single_user_map.get (var));
#endif
	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s/%i is used by single function\n",
		       var->name (), var->order);
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}
1408
1409namespace {
1410
/* Pass descriptor for the single-use discovery pass below.  */

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1423
/* IPA pass computing the used_by_single_function flag; the work is done
   by ipa_single_use.  All IPA summary and transform hooks are unused.  */

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use
1445
1446bool
1447pass_ipa_single_use::gate (function *)
1448{
1449  return optimize;
1450}
1451
1452} // anon namespace
1453
/* Factory for the single-use discovery pass; used by the pass manager.  */

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}
1459