1/* Callgraph based analysis of static variables.
2   Copyright (C) 2004-2015 Free Software Foundation, Inc.
3   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21/* This file marks functions as being either const (TREE_READONLY) or
22   pure (DECL_PURE_P).  It can also set a variant of these that
23   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24
25   This must be run after inlining decisions have been made since
26   otherwise, the local sets will not contain information that is
27   consistent with post inlined state.  The global sets are not prone
28   to this problem since they are by definition transitive.  */
29
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
37#include "tm.h"
38#include "hash-set.h"
39#include "machmode.h"
40#include "vec.h"
41#include "double-int.h"
42#include "input.h"
43#include "alias.h"
44#include "symtab.h"
45#include "wide-int.h"
46#include "inchash.h"
47#include "tree.h"
48#include "fold-const.h"
49#include "print-tree.h"
50#include "calls.h"
51#include "predict.h"
52#include "hard-reg-set.h"
53#include "input.h"
54#include "function.h"
55#include "dominance.h"
56#include "cfg.h"
57#include "cfganal.h"
58#include "basic-block.h"
59#include "tree-ssa-alias.h"
60#include "internal-fn.h"
61#include "tree-eh.h"
62#include "gimple-expr.h"
63#include "is-a.h"
64#include "gimple.h"
65#include "gimple-iterator.h"
66#include "gimple-walk.h"
67#include "tree-cfg.h"
68#include "tree-ssa-loop-niter.h"
69#include "tree-inline.h"
70#include "tree-pass.h"
71#include "langhooks.h"
72#include "hash-map.h"
73#include "plugin-api.h"
74#include "ipa-ref.h"
75#include "cgraph.h"
76#include "ipa-utils.h"
77#include "flags.h"
78#include "diagnostic.h"
79#include "gimple-pretty-print.h"
80#include "langhooks.h"
81#include "target.h"
82#include "lto-streamer.h"
83#include "data-streamer.h"
84#include "tree-streamer.h"
85#include "cfgloop.h"
86#include "tree-scalar-evolution.h"
87#include "intl.h"
88#include "opts.h"
89#include "varasm.h"
90
91/* Lattice values for const and pure functions.  Everything starts out
92   being const, then may drop to pure and then neither depending on
93   what is found.  */
enum pure_const_state_e
{
  /* The numeric order is significant: better_state and worse_state
     below merge values with MIN/MAX, so the lattice must be ordered
     const < pure < neither.  */
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};
100
/* Human-readable names for the lattice values, indexed by
   enum pure_const_state_e.  */
const char *pure_const_names[3] = {"const", "pure", "neither"};
102
103/* Holder for the const_state.  There is one of these per function
104   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Looping companion to STATE_PREVIOUSLY_KNOWN, derived from the
     declaration's flags (see state_from_flags).  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception that escapes to its
     callers (set from stmt_can_throw_external).  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;
};
127
/* State used when we know nothing about function.
   Returned by get_function_state for nodes that have no recorded
   state; everything is pessimized.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

/* Forward declaration so the pass class below can reference it.  */
static bool gate_pure_const (void);
144
namespace {

/* Pass metadata for the IPA pure/const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  /* Install the cgraph insertion/duplication/removal hooks; a no-op
     after the first call (guarded by INIT_P).  */
  void register_hooks (void);

private:
  /* True once register_hooks has run.  */
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace
182
183/* Try to guess if function body will always be visible to compiler
184   when compiling the call and whether compiler will be able
185   to propagate the information by itself.  */
186
187static bool
188function_always_visible_to_compiler_p (tree decl)
189{
190  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
191}
192
193/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
194   is true if the function is known to be finite.  The diagnostic is
195   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
196   OPTION, this function may initialize it and it is always returned
197   by the function.  */
198
199static hash_set<tree> *
200suggest_attribute (int option, tree decl, bool known_finite,
201		   hash_set<tree> *warned_about,
202		   const char * attrib_name)
203{
204  if (!option_enabled (option, &global_options))
205    return warned_about;
206  if (TREE_THIS_VOLATILE (decl)
207      || (known_finite && function_always_visible_to_compiler_p (decl)))
208    return warned_about;
209
210  if (!warned_about)
211    warned_about = new hash_set<tree>;
212  if (warned_about->contains (decl))
213    return warned_about;
214  warned_about->add (decl);
215  warning_at (DECL_SOURCE_LOCATION (decl),
216	      option,
217	      known_finite
218	      ? _("function might be candidate for attribute %<%s%>")
219	      : _("function might be candidate for attribute %<%s%>"
220		  " if it is known to return normally"), attrib_name);
221  return warned_about;
222}
223
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */
226
227static void
228warn_function_pure (tree decl, bool known_finite)
229{
230  static hash_set<tree> *warned_about;
231
232  warned_about
233    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
234			 known_finite, warned_about, "pure");
235}
236
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */
239
240static void
241warn_function_const (tree decl, bool known_finite)
242{
243  static hash_set<tree> *warned_about;
244  warned_about
245    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
246			 known_finite, warned_about, "const");
247}
248
249static void
250warn_function_noreturn (tree decl)
251{
252  static hash_set<tree> *warned_about;
253  if (!lang_hooks.missing_noreturn_ok_p (decl)
254      && targetm.warn_func_return (decl))
255    warned_about
256      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
257			   true, warned_about, "noreturn");
258}
259
260/* Return true if we have a function state for NODE.  */
261
262static inline bool
263has_function_state (struct cgraph_node *node)
264{
265  if (!funct_state_vec.exists ()
266      || funct_state_vec.length () <= (unsigned int)node->uid)
267    return false;
268  return funct_state_vec[node->uid] != NULL;
269}
270
271/* Return the function state from NODE.  */
272
273static inline funct_state
274get_function_state (struct cgraph_node *node)
275{
276  if (!funct_state_vec.exists ()
277      || funct_state_vec.length () <= (unsigned int)node->uid
278      || !funct_state_vec[node->uid])
279    /* We might want to put correct previously_known state into varying.  */
280    return &varying_state;
281 return funct_state_vec[node->uid];
282}
283
284/* Set the function state S for NODE.  */
285
286static inline void
287set_function_state (struct cgraph_node *node, funct_state s)
288{
289  if (!funct_state_vec.exists ()
290      || funct_state_vec.length () <= (unsigned int)node->uid)
291     funct_state_vec.safe_grow_cleared (node->uid + 1);
292  funct_state_vec[node->uid] = s;
293}
294
295/* Check to see if the use (or definition when CHECKING_WRITE is true)
296   variable T is legal in a function that is either pure or const.  */
297
298static inline void
299check_decl (funct_state local,
300	    tree t, bool checking_write, bool ipa)
301{
302  /* Do not want to do anything with volatile except mark any
303     function that uses one to be not const or pure.  */
304  if (TREE_THIS_VOLATILE (t))
305    {
306      local->pure_const_state = IPA_NEITHER;
307      if (dump_file)
308        fprintf (dump_file, "    Volatile operand is not const/pure");
309      return;
310    }
311
312  /* Do not care about a local automatic that is not static.  */
313  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
314    return;
315
316  /* If the variable has the "used" attribute, treat it as if it had a
317     been touched by the devil.  */
318  if (DECL_PRESERVE_P (t))
319    {
320      local->pure_const_state = IPA_NEITHER;
321      if (dump_file)
322        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
323      return;
324    }
325
326  /* In IPA mode we are not interested in checking actual loads and stores;
327     they will be processed at propagation time using ipa_ref.  */
328  if (ipa)
329    return;
330
331  /* Since we have dealt with the locals and params cases above, if we
332     are CHECKING_WRITE, this cannot be a pure or constant
333     function.  */
334  if (checking_write)
335    {
336      local->pure_const_state = IPA_NEITHER;
337      if (dump_file)
338        fprintf (dump_file, "    static/global memory write is not const/pure\n");
339      return;
340    }
341
342  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
343    {
344      /* Readonly reads are safe.  */
345      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
346	return; /* Read of a constant, do not change the function state.  */
347      else
348	{
349          if (dump_file)
350            fprintf (dump_file, "    global memory read is not const\n");
351	  /* Just a regular read.  */
352	  if (local->pure_const_state == IPA_CONST)
353	    local->pure_const_state = IPA_PURE;
354	}
355    }
356  else
357    {
358      /* Compilation level statics can be read if they are readonly
359	 variables.  */
360      if (TREE_READONLY (t))
361	return;
362
363      if (dump_file)
364	fprintf (dump_file, "    static memory read is not const\n");
365      /* Just a regular read.  */
366      if (local->pure_const_state == IPA_CONST)
367	local->pure_const_state = IPA_PURE;
368    }
369}
370
371
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */
374
375static inline void
376check_op (funct_state local, tree t, bool checking_write)
377{
378  t = get_base_address (t);
379  if (t && TREE_THIS_VOLATILE (t))
380    {
381      local->pure_const_state = IPA_NEITHER;
382      if (dump_file)
383	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
384      return;
385    }
386  else if (t
387  	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
388	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
389	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
390    {
391      if (dump_file)
392	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
393      return;
394    }
395  else if (checking_write)
396    {
397      local->pure_const_state = IPA_NEITHER;
398      if (dump_file)
399	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
400      return;
401    }
402  else
403    {
404      if (dump_file)
405	fprintf (dump_file, "    Indirect ref read is not const\n");
406      if (local->pure_const_state == IPA_CONST)
407	local->pure_const_state = IPA_PURE;
408    }
409}
410
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */
412
413static void
414state_from_flags (enum pure_const_state_e *state, bool *looping,
415	          int flags, bool cannot_lead_to_return)
416{
417  *looping = false;
418  if (flags & ECF_LOOPING_CONST_OR_PURE)
419    {
420      *looping = true;
421      if (dump_file && (dump_flags & TDF_DETAILS))
422	fprintf (dump_file, " looping");
423    }
424  if (flags & ECF_CONST)
425    {
426      *state = IPA_CONST;
427      if (dump_file && (dump_flags & TDF_DETAILS))
428	fprintf (dump_file, " const\n");
429    }
430  else if (flags & ECF_PURE)
431    {
432      *state = IPA_PURE;
433      if (dump_file && (dump_flags & TDF_DETAILS))
434	fprintf (dump_file, " pure\n");
435    }
436  else if (cannot_lead_to_return)
437    {
438      *state = IPA_PURE;
439      *looping = true;
440      if (dump_file && (dump_flags & TDF_DETAILS))
441	fprintf (dump_file, " ignoring side effects->pure looping\n");
442    }
443  else
444    {
445      if (dump_file && (dump_flags & TDF_DETAILS))
446	fprintf (dump_file, " neither\n");
447      *state = IPA_NEITHER;
448      *looping = true;
449    }
450}
451
452/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
453   into STATE and LOOPING better of the two variants.
454   Be sure to merge looping correctly.  IPA_NEITHER functions
455   have looping 0 even if they don't have to return.  */
456
457static inline void
458better_state (enum pure_const_state_e *state, bool *looping,
459	      enum pure_const_state_e state2, bool looping2)
460{
461  if (state2 < *state)
462    {
463      if (*state == IPA_NEITHER)
464	*looping = looping2;
465      else
466	*looping = MIN (*looping, looping2);
467      *state = state2;
468    }
469  else if (state2 != IPA_NEITHER)
470    *looping = MIN (*looping, looping2);
471}
472
473/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
474   into STATE and LOOPING worse of the two variants.  */
475
476static inline void
477worse_state (enum pure_const_state_e *state, bool *looping,
478	     enum pure_const_state_e state2, bool looping2)
479{
480  *state = MAX (*state, state2);
481  *looping = MAX (*looping, looping2);
482}
483
484/* Recognize special cases of builtins that are by themselves not pure or const
485   but function using them is.  */
486static bool
487special_builtin_state (enum pure_const_state_e *state, bool *looping,
488			tree callee)
489{
490  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
491    switch (DECL_FUNCTION_CODE (callee))
492      {
493	case BUILT_IN_RETURN:
494	case BUILT_IN_UNREACHABLE:
495	case BUILT_IN_ALLOCA:
496	case BUILT_IN_ALLOCA_WITH_ALIGN:
497	case BUILT_IN_STACK_SAVE:
498	case BUILT_IN_STACK_RESTORE:
499	case BUILT_IN_EH_POINTER:
500	case BUILT_IN_EH_FILTER:
501	case BUILT_IN_UNWIND_RESUME:
502	case BUILT_IN_CXA_END_CLEANUP:
503	case BUILT_IN_EH_COPY_VALUES:
504	case BUILT_IN_FRAME_ADDRESS:
505	case BUILT_IN_APPLY:
506	case BUILT_IN_APPLY_ARGS:
507	  *looping = false;
508	  *state = IPA_CONST;
509	  return true;
510	case BUILT_IN_PREFETCH:
511	  *looping = true;
512	  *state = IPA_CONST;
513	  return true;
514	default:
515	  break;
516      }
517  return false;
518}
519
/* Check the parameters of function call CALL to see if there are any
   references in the parameters that are not allowed for pure or const
   functions.  Also check to see if this is either an indirect call, a
   call outside the compilation unit, or has special attributes that
   may also affect the purity.  */
526
527static void
528check_call (funct_state local, gcall *call, bool ipa)
529{
530  int flags = gimple_call_flags (call);
531  tree callee_t = gimple_call_fndecl (call);
532  bool possibly_throws = stmt_could_throw_p (call);
533  bool possibly_throws_externally = (possibly_throws
534  				     && stmt_can_throw_external (call));
535
536  if (possibly_throws)
537    {
538      unsigned int i;
539      for (i = 0; i < gimple_num_ops (call); i++)
540        if (gimple_op (call, i)
541	    && tree_could_throw_p (gimple_op (call, i)))
542	  {
543	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
544	      {
545		if (dump_file)
546		  fprintf (dump_file, "    operand can throw; looping\n");
547		local->looping = true;
548	      }
549	    if (possibly_throws_externally)
550	      {
551		if (dump_file)
552		  fprintf (dump_file, "    operand can throw externally\n");
553		local->can_throw = true;
554	      }
555	  }
556    }
557
558  /* The const and pure flags are set by a variety of places in the
559     compiler (including here).  If someone has already set the flags
560     for the callee, (such as for some of the builtins) we will use
561     them, otherwise we will compute our own information.
562
563     Const and pure functions have less clobber effects than other
564     functions so we process these first.  Otherwise if it is a call
565     outside the compilation unit or an indirect call we punt.  This
566     leaves local calls which will be processed by following the call
567     graph.  */
568  if (callee_t)
569    {
570      enum pure_const_state_e call_state;
571      bool call_looping;
572
573      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
574	  && !nonfreeing_call_p (call))
575	local->can_free = true;
576
577      if (special_builtin_state (&call_state, &call_looping, callee_t))
578	{
579	  worse_state (&local->pure_const_state, &local->looping,
580		       call_state, call_looping);
581	  return;
582	}
583      /* When bad things happen to bad functions, they cannot be const
584	 or pure.  */
585      if (setjmp_call_p (callee_t))
586	{
587	  if (dump_file)
588	    fprintf (dump_file, "    setjmp is not const/pure\n");
589          local->looping = true;
590	  local->pure_const_state = IPA_NEITHER;
591	}
592
593      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
594	switch (DECL_FUNCTION_CODE (callee_t))
595	  {
596	  case BUILT_IN_LONGJMP:
597	  case BUILT_IN_NONLOCAL_GOTO:
598	    if (dump_file)
599	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
600	    local->pure_const_state = IPA_NEITHER;
601            local->looping = true;
602	    break;
603	  default:
604	    break;
605	  }
606    }
607  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
608    local->can_free = true;
609
610  /* When not in IPA mode, we can still handle self recursion.  */
611  if (!ipa && callee_t
612      && recursive_call_p (current_function_decl, callee_t))
613    {
614      if (dump_file)
615        fprintf (dump_file, "    Recursive call can loop.\n");
616      local->looping = true;
617    }
618  /* Either callee is unknown or we are doing local analysis.
619     Look to see if there are any bits available for the callee (such as by
620     declaration or because it is builtin) and process solely on the basis of
621     those bits. */
622  else if (!ipa)
623    {
624      enum pure_const_state_e call_state;
625      bool call_looping;
626      if (possibly_throws && cfun->can_throw_non_call_exceptions)
627        {
628	  if (dump_file)
629	    fprintf (dump_file, "    can throw; looping\n");
630          local->looping = true;
631	}
632      if (possibly_throws_externally)
633        {
634	  if (dump_file)
635	    {
636	      fprintf (dump_file, "    can throw externally to lp %i\n",
637	      	       lookup_stmt_eh_lp (call));
638	      if (callee_t)
639		fprintf (dump_file, "     callee:%s\n",
640			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
641	    }
642          local->can_throw = true;
643	}
644      if (dump_file && (dump_flags & TDF_DETAILS))
645	fprintf (dump_file, "    checking flags for call:");
646      state_from_flags (&call_state, &call_looping, flags,
647			((flags & (ECF_NORETURN | ECF_NOTHROW))
648			 == (ECF_NORETURN | ECF_NOTHROW))
649			|| (!flag_exceptions && (flags & ECF_NORETURN)));
650      worse_state (&local->pure_const_state, &local->looping,
651		   call_state, call_looping);
652    }
653  /* Direct functions calls are handled by IPA propagation.  */
654}
655
/* Wrapper around check_decl for loads in local mode.  */
657
658static bool
659check_load (gimple, tree op, tree, void *data)
660{
661  if (DECL_P (op))
662    check_decl ((funct_state)data, op, false, false);
663  else
664    check_op ((funct_state)data, op, false);
665  return false;
666}
667
/* Wrapper around check_decl for stores in local mode.  */
669
670static bool
671check_store (gimple, tree op, tree, void *data)
672{
673  if (DECL_P (op))
674    check_decl ((funct_state)data, op, true, false);
675  else
676    check_op ((funct_state)data, op, true);
677  return false;
678}
679
680/* Wrapper around check_decl for loads in ipa mode.  */
681
682static bool
683check_ipa_load (gimple, tree op, tree, void *data)
684{
685  if (DECL_P (op))
686    check_decl ((funct_state)data, op, false, true);
687  else
688    check_op ((funct_state)data, op, false);
689  return false;
690}
691
692/* Wrapper around check_decl for stores in ipa mode.  */
693
694static bool
695check_ipa_store (gimple, tree op, tree, void *data)
696{
697  if (DECL_P (op))
698    check_decl ((funct_state)data, op, true, true);
699  else
700    check_op ((funct_state)data, op, true);
701  return false;
702}
703
/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  /* Debug statements have no effect on pure/const state.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Any volatile access other than a clobber disqualifies both const
     and pure.  */
  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store :  check_store);

  /* Calls account for their own EH effects in check_call below; handle
     possible non-call exceptions for every other statement here.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump. */
	{
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
799
800
801/* This is the main routine for finding the reference patterns for
802   global variables within a function FN.  */
803
static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start from the optimistic assumption (const, non-looping); the
     statement walk below only ever weakens the state.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  /* Record what the declaration's flags already promise; merged back
     in via better_state at the end.  */
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  /* Once every property has been pessimized, nothing further
	     can change; stop scanning.  */
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
        {
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     proved to be finite in testcases such as in loop-15.c
	     and loop-24.c  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  /* Irreducible loops cannot be analyzed for finiteness;
	     otherwise ask SCEV about every natural loop.  */
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
	        fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping =true;
		    break;
		  }
	      scev_finalize ();
	    }
          loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  /* Combine the discovered state with what the declaration already
     promised, keeping the better of the two.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
    }
  return l;
}
925
926/* Called when new function is inserted to callgraph late.  */
927static void
928add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
929{
930 if (node->get_availability () < AVAIL_INTERPOSABLE)
931   return;
932  /* There are some shared nodes, in particular the initializers on
933     static declarations.  We do not need to scan them more than once
934     since all we would be interested in are the addressof
935     operations.  */
936  if (node->get_availability () > AVAIL_INTERPOSABLE
937      && opt_for_fn (node->decl, flag_ipa_pure_const))
938    set_function_state (node, analyze_function (node, true));
939}
940
941/* Called when new clone is inserted to callgraph late.  */
942
943static void
944duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
945	 	     void *data ATTRIBUTE_UNUSED)
946{
947  if (has_function_state (src))
948    {
949      funct_state l = XNEW (struct funct_state_d);
950      gcc_assert (!has_function_state (dst));
951      memcpy (l, get_function_state (src), sizeof (*l));
952      set_function_state (dst, l);
953    }
954}
955
/* Called when a cgraph node is removed.  Release its state.  */
957
958static void
959remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
960{
961  if (has_function_state (node))
962    {
963      funct_state l = get_function_state (node);
964      if (l != &varying_state)
965        free (l);
966      set_function_state (node, NULL);
967    }
968}
969
970
971void
972pass_ipa_pure_const::
973register_hooks (void)
974{
975  if (init_p)
976    return;
977
978  init_p = true;
979
980  node_removal_hook_holder =
981      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
982  node_duplication_hook_holder =
983      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
984  function_insertion_hook_holder =
985      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
986}
987
988
989/* Analyze each function in the cgraph to see if it is locally PURE or
990   CONST.  */
991
992static void
993pure_const_generate_summary (void)
994{
995  struct cgraph_node *node;
996
997  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
998  pass->register_hooks ();
999
1000  /* Process all of the functions.
1001
1002     We process AVAIL_INTERPOSABLE functions.  We can not use the results
1003     by default, but the info can be used at LTO with -fwhole-program or
1004     when function got cloned and the clone is AVAILABLE.  */
1005
1006  FOR_EACH_DEFINED_FUNCTION (node)
1007    if (node->get_availability () >= AVAIL_INTERPOSABLE
1008        && opt_for_fn (node->decl, flag_ipa_pure_const))
1009      set_function_state (node, analyze_function (node, true));
1010}
1011
1012
1013/* Serialize the ipa info for lto.  */
1014
1015static void
1016pure_const_write_summary (void)
1017{
1018  struct cgraph_node *node;
1019  struct lto_simple_output_block *ob
1020    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1021  unsigned int count = 0;
1022  lto_symtab_encoder_iterator lsei;
1023  lto_symtab_encoder_t encoder;
1024
1025  encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1026
1027  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1028       lsei_next_function_in_partition (&lsei))
1029    {
1030      node = lsei_cgraph_node (lsei);
1031      if (node->definition && has_function_state (node))
1032	count++;
1033    }
1034
1035  streamer_write_uhwi_stream (ob->main_stream, count);
1036
1037  /* Process all of the functions.  */
1038  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1039       lsei_next_function_in_partition (&lsei))
1040    {
1041      node = lsei_cgraph_node (lsei);
1042      if (node->definition && has_function_state (node))
1043	{
1044	  struct bitpack_d bp;
1045	  funct_state fs;
1046	  int node_ref;
1047	  lto_symtab_encoder_t encoder;
1048
1049	  fs = get_function_state (node);
1050
1051	  encoder = ob->decl_state->symtab_node_encoder;
1052	  node_ref = lto_symtab_encoder_encode (encoder, node);
1053	  streamer_write_uhwi_stream (ob->main_stream, node_ref);
1054
1055	  /* Note that flags will need to be read in the opposite
1056	     order as we are pushing the bitflags into FLAGS.  */
1057	  bp = bitpack_create (ob->main_stream);
1058	  bp_pack_value (&bp, fs->pure_const_state, 2);
1059	  bp_pack_value (&bp, fs->state_previously_known, 2);
1060	  bp_pack_value (&bp, fs->looping_previously_known, 1);
1061	  bp_pack_value (&bp, fs->looping, 1);
1062	  bp_pack_value (&bp, fs->can_throw, 1);
1063	  bp_pack_value (&bp, fs->can_free, 1);
1064	  streamer_write_bitpack (&bp);
1065	}
1066    }
1067
1068  lto_destroy_simple_output_block (ob);
1069}
1070
1071
1072/* Deserialize the ipa info for lto.  */
1073
1074static void
1075pure_const_read_summary (void)
1076{
1077  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1078  struct lto_file_decl_data *file_data;
1079  unsigned int j = 0;
1080
1081  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1082  pass->register_hooks ();
1083
1084  while ((file_data = file_data_vec[j++]))
1085    {
1086      const char *data;
1087      size_t len;
1088      struct lto_input_block *ib
1089	= lto_create_simple_input_block (file_data,
1090					 LTO_section_ipa_pure_const,
1091					 &data, &len);
1092      if (ib)
1093	{
1094	  unsigned int i;
1095	  unsigned int count = streamer_read_uhwi (ib);
1096
1097	  for (i = 0; i < count; i++)
1098	    {
1099	      unsigned int index;
1100	      struct cgraph_node *node;
1101	      struct bitpack_d bp;
1102	      funct_state fs;
1103	      lto_symtab_encoder_t encoder;
1104
1105	      fs = XCNEW (struct funct_state_d);
1106	      index = streamer_read_uhwi (ib);
1107	      encoder = file_data->symtab_node_encoder;
1108	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1109									index));
1110	      set_function_state (node, fs);
1111
1112	      /* Note that the flags must be read in the opposite
1113		 order in which they were written (the bitflags were
1114		 pushed into FLAGS).  */
1115	      bp = streamer_read_bitpack (ib);
1116	      fs->pure_const_state
1117			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1118	      fs->state_previously_known
1119			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1120	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
1121	      fs->looping = bp_unpack_value (&bp, 1);
1122	      fs->can_throw = bp_unpack_value (&bp, 1);
1123	      fs->can_free = bp_unpack_value (&bp, 1);
1124	      if (dump_file)
1125		{
1126		  int flags = flags_from_decl_or_type (node->decl);
1127		  fprintf (dump_file, "Read info for %s/%i ",
1128			   node->name (),
1129			   node->order);
1130		  if (flags & ECF_CONST)
1131		    fprintf (dump_file, " const");
1132		  if (flags & ECF_PURE)
1133		    fprintf (dump_file, " pure");
1134		  if (flags & ECF_NOTHROW)
1135		    fprintf (dump_file, " nothrow");
1136		  fprintf (dump_file, "\n  pure const state: %s\n",
1137			   pure_const_names[fs->pure_const_state]);
1138		  fprintf (dump_file, "  previously known state: %s\n",
1139			   pure_const_names[fs->looping_previously_known]);
1140		  if (fs->looping)
1141		    fprintf (dump_file,"  function is locally looping\n");
1142		  if (fs->looping_previously_known)
1143		    fprintf (dump_file,"  function is previously known looping\n");
1144		  if (fs->can_throw)
1145		    fprintf (dump_file,"  function is locally throwing\n");
1146		  if (fs->can_free)
1147		    fprintf (dump_file,"  function can locally free\n");
1148		}
1149	    }
1150
1151	  lto_destroy_simple_input_block (file_data,
1152					  LTO_section_ipa_pure_const,
1153					  ib, data, len);
1154	}
1155    }
1156}
1157
1158
1159static bool
1160ignore_edge (struct cgraph_edge *e)
1161{
1162  return (!e->can_throw_external);
1163}
1164
1165/* Return true if NODE is self recursive function.
1166   Indirectly recursive functions appears as non-trivial strongly
1167   connected components, so we need to care about self recursion
1168   only.  */
1169
1170static bool
1171self_recursive_p (struct cgraph_node *node)
1172{
1173  struct cgraph_edge *e;
1174  for (e = node->callees; e; e = e->next_callee)
1175    if (e->callee->function_symbol () == node)
1176      return true;
1177  return false;
1178}
1179
1180/* Return true if N is cdtor that is not const or pure.  In this case we may
1181   need to remove unreachable function if it is marked const/pure.  */
1182
1183static bool
1184cdtor_p (cgraph_node *n, void *)
1185{
1186  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1187    return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
1188  return false;
1189}
1190
1191/* Produce transitive closure over the callgraph and compute pure/const
1192   attributes.  */
1193
/* Returns true if some static constructor/destructor was newly marked
   const/pure (see cdtor_p); the caller must then schedule
   TODO_remove_functions so unreachable cdtors can be removed.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  /* Collapse strongly connected components so cycles can be handled
     as single regions.  */
  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      /* Skip aliases; the analysis is done on function bodies.  */
      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
		     w->name (),
		     w->order,
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping);
	  /* IPA_NEITHER is the bottom of the lattice; no need to look
	     further.  */
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* For overwritable nodes we can not assume anything.  */
	  if (w->get_availability () == AVAIL_INTERPOSABLE)
	    {
	      worse_state (&pure_const_state, &looping,
			   w_l->state_previously_known,
			   w_l->looping_previously_known);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Overwritable. state %s looping %i\n",
			   pure_const_names[w_l->state_previously_known],
			   w_l->looping_previously_known);
		}
	      break;
	    }

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Call to %s/%i",
			   e->callee->name (),
			   e->callee->order);
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  /* A call that never leads back to a return (e.g. a
		     noreturn callee) may have its side effects
		     ignored; the call is then treated as pure but
		     possibly looping.  */
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					       y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
	        break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* Now process the indirect call.  */
          for (ie = w->indirect_calls; ie; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      /* Only the ECF flags recorded on the indirect edge are
		 available here.  */
	      state_from_flags (&edge_state, &edge_looping,
			        ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
	        break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref); i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  /* A store that cannot lead back to a return may be
		     ignored.  */
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		case IPA_REF_CHKP:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  /* Advance to the next member of the SCC.  */
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  /* Interposable bodies and indirect calls may hide a call to
	     free; assume the worst.  */
	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  /* A better previously-known state wins over the propagated
	     one.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
              this_state = w_l->state_previously_known;
	      this_looping |= w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  /* If the function was not previously known to loop, trust
	     that and clear the propagated looping bit.  */
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_const_flag (true, this_looping);
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_pure_flag (true, this_looping);
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
1510
1511/* Produce transitive closure over the callgraph and compute nothrow
1512   attributes.  */
1513
static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Build SCCs considering only edges that can throw externally
     (see ignore_edge).  */
  order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      /* Skip aliases; the analysis is done on function bodies.  */
      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;
	  funct_state w_l = get_function_state (w);

	  /* Interposable bodies may be replaced, so nothing may be
	     assumed about them.  */
	  if (w_l->can_throw
	      || w->get_availability () == AVAIL_INTERPOSABLE)
	    can_throw = true;

	  for (e = w->callees; e && !can_throw; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);

	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);

		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
		      && e->can_throw_external)
		    can_throw = true;
		}
	      else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
	        can_throw = true;
	    }
	  /* Nothing is known about indirect callees; any externally
	     throwing indirect call spoils the result.  */
          for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
	    if (ie->can_throw_external)
	      can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1608
1609
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */
1612
1613unsigned int
1614pass_ipa_pure_const::
1615execute (function *)
1616{
1617  struct cgraph_node *node;
1618  bool remove_p;
1619
1620  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1621  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1622  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
1623
1624  /* Nothrow makes more function to not lead to return and improve
1625     later analysis.  */
1626  propagate_nothrow ();
1627  remove_p = propagate_pure_const ();
1628
1629  /* Cleanup. */
1630  FOR_EACH_FUNCTION (node)
1631    if (has_function_state (node))
1632      free (get_function_state (node));
1633  funct_state_vec.release ();
1634  return remove_p ? TODO_remove_functions : 0;
1635}
1636
1637static bool
1638gate_pure_const (void)
1639{
1640  return flag_ipa_pure_const || in_lto_p;
1641}
1642
/* Construct the IPA pure/const pass.  Summary generation, write and
   read are wired to the functions above; there is no per-function
   transform stage since propagation updates the declarations
   directly.  The hook holders start out NULL and are filled in by
   register_hooks.  */

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}
1660
/* Factory used by the pass manager to create the IPA pure/const
   pass.  */

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
1666
1667/* Return true if function should be skipped for local pure const analysis.  */
1668
1669static bool
1670skip_function_for_local_pure_const (struct cgraph_node *node)
1671{
1672  /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1673     we must not promote functions that are called by already processed functions.  */
1674
1675  if (function_called_by_processed_nodes_p ())
1676    {
1677      if (dump_file)
1678        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1679      return true;
1680    }
1681  if (node->get_availability () <= AVAIL_INTERPOSABLE)
1682    {
1683      if (dump_file)
1684        fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
1685      return true;
1686    }
1687  return false;
1688}
1689
1690/* Simple local pass for pure const discovery reusing the analysis from
1691   ipa_pure_const.   This pass is effective when executed together with
1692   other optimization passes in early optimization pass queue.  */
1693
1694namespace {
1695
/* Pass descriptor for the GIMPLE-level local pure/const pass.  */

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1708
/* GIMPLE pass wrapper around the local pure/const analysis.  */

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass can appear more than once in the pass
     queue.  */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
1722
/* Analyze just the current function and update its declaration
   (const/pure/nothrow/noreturn flags) where provable.  Returns the
   TODO flags of execute_fixup_cfg when anything changed, 0
   otherwise.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  /* Even when SKIP, keep going if attribute-suggestion warnings were
     requested: the analysis still feeds the warnings, while actual
     declaration updates below are guarded by !skip.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  /* false: local (non-IPA) analysis mode.  */
  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (!skip)
	    {
	      node->set_const_flag (true, l->looping);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already const; possibly drop the looping bit.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_const_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, l->looping);
	      changed = true;
	    }
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already pure; possibly drop the looping bit.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }
  /* The local summary is not stored; release it.  */
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
1832
1833} // anon namespace
1834
/* Factory used by the pass manager to create the local pure/const
   pass.  */

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
1840
1841/* Emit noreturn warnings.  */
1842
1843namespace {
1844
/* Pass descriptor for the noreturn-warning pass; requires a CFG so
   the exit block's predecessors can be inspected.  */

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1857
/* Pass emitting -Wsuggest-attribute=noreturn warnings.  */

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when the warning was requested.  */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      /* A function whose exit block has no predecessors never
	 returns; suggest noreturn unless already marked volatile.  */
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
1876
1877} // anon namespace
1878
1879gimple_opt_pass *
1880make_pass_warn_function_noreturn (gcc::context *ctxt)
1881{
1882  return new pass_warn_function_noreturn (ctxt);
1883}
1884
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  It marks a function nothrow when no statement in
   its body can throw externally.  */
1888
1889namespace {
1890
/* Pass descriptor for the local nothrow-discovery pass.  */

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1903
/* GIMPLE pass wrapper for local nothrow discovery.  */

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass can appear more than once in the pass
     queue.  */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
1917
1918unsigned int
1919pass_nothrow::execute (function *)
1920{
1921  struct cgraph_node *node;
1922  basic_block this_block;
1923
1924  if (TREE_NOTHROW (current_function_decl))
1925    return 0;
1926
1927  node = cgraph_node::get (current_function_decl);
1928
1929  /* We run during lowering, we can not really use availability yet.  */
1930  if (cgraph_node::get (current_function_decl)->get_availability ()
1931      <= AVAIL_INTERPOSABLE)
1932    {
1933      if (dump_file)
1934        fprintf (dump_file, "Function is interposable;"
1935	         " not analyzing.\n");
1936      return true;
1937    }
1938
1939  FOR_EACH_BB_FN (this_block, cfun)
1940    {
1941      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1942	   !gsi_end_p (gsi);
1943	   gsi_next (&gsi))
1944        if (stmt_can_throw_external (gsi_stmt (gsi)))
1945	  {
1946	    if (is_gimple_call (gsi_stmt (gsi)))
1947	      {
1948		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1949		if (callee_t && recursive_call_p (current_function_decl,
1950						  callee_t))
1951		  continue;
1952	      }
1953
1954	    if (dump_file)
1955	      {
1956		fprintf (dump_file, "Statement can throw: ");
1957		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
1958	      }
1959	    return 0;
1960	  }
1961    }
1962
1963  node->set_nothrow_flag (true);
1964  if (dump_file)
1965    fprintf (dump_file, "Function found to be nothrow: %s\n",
1966	     current_function_name ());
1967  return 0;
1968}
1969
1970} // anon namespace
1971
/* Factory used by the pass manager to create the nothrow pass.  */

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}
1977