1/* Tree inlining.
2   Copyright (C) 2001-2015 Free Software Foundation, Inc.
3   Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "diagnostic-core.h"
26#include "hash-set.h"
27#include "machmode.h"
28#include "vec.h"
29#include "double-int.h"
30#include "input.h"
31#include "alias.h"
32#include "symtab.h"
33#include "wide-int.h"
34#include "inchash.h"
35#include "tree.h"
36#include "fold-const.h"
37#include "stor-layout.h"
38#include "calls.h"
39#include "tree-inline.h"
40#include "flags.h"
41#include "params.h"
42#include "insn-config.h"
43#include "hashtab.h"
44#include "langhooks.h"
45#include "predict.h"
46#include "hard-reg-set.h"
47#include "function.h"
48#include "dominance.h"
49#include "cfg.h"
50#include "cfganal.h"
51#include "basic-block.h"
52#include "tree-iterator.h"
53#include "intl.h"
54#include "tree-ssa-alias.h"
55#include "internal-fn.h"
56#include "gimple-fold.h"
57#include "tree-eh.h"
58#include "gimple-expr.h"
59#include "is-a.h"
60#include "gimple.h"
61#include "gimplify.h"
62#include "gimple-iterator.h"
63#include "gimplify-me.h"
64#include "gimple-walk.h"
65#include "gimple-ssa.h"
66#include "tree-cfg.h"
67#include "tree-phinodes.h"
68#include "ssa-iterators.h"
69#include "stringpool.h"
70#include "tree-ssanames.h"
71#include "tree-into-ssa.h"
72#include "rtl.h"
73#include "statistics.h"
74#include "real.h"
75#include "fixed-value.h"
76#include "expmed.h"
77#include "dojump.h"
78#include "explow.h"
79#include "emit-rtl.h"
80#include "varasm.h"
81#include "stmt.h"
82#include "expr.h"
83#include "tree-dfa.h"
84#include "tree-ssa.h"
85#include "tree-pretty-print.h"
86#include "except.h"
87#include "debug.h"
88#include "hash-map.h"
89#include "plugin-api.h"
90#include "ipa-ref.h"
91#include "cgraph.h"
92#include "alloc-pool.h"
93#include "symbol-summary.h"
94#include "ipa-prop.h"
95#include "value-prof.h"
96#include "tree-pass.h"
97#include "target.h"
98#include "cfgloop.h"
99#include "builtins.h"
100#include "tree-chkp.h"
101
102#include "rtl.h"	/* FIXME: For asm_str_count.  */
103
/* I'm not really happy about this, but we need to handle both GIMPLE
   and non-GIMPLE trees.  */
106
107/* Inlining, Cloning, Versioning, Parallelization
108
109   Inlining: a function body is duplicated, but the PARM_DECLs are
110   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
111   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.
115
116   Cloning: (only in C++) We have one body for a con/de/structor, and
117   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given decl map; some parameters
   will become constants (like 0 or 1).
120
   Versioning: a function body is duplicated and the result becomes a
   new function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.
124
125   Parallelization: a region of a function is duplicated resulting in
126   a new function.  Variables may be replaced with complex expressions
127   to enable shared variable semantics.
128
   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
133   If we're cloning the body, those callgraph edges will be
134   updated to point into the new body.  (Note that the original
135   callgraph node and edge list will not be altered.)
136
137   See the CALL_EXPR handling case in copy_tree_body_r ().  */
138
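/* As a rough illustration of the inlining case: when a call to
   `int inc (int x) { return x + 1; }' is inlined, the body of `inc' is
   duplicated, the PARM_DECL `x' is remapped to a new local VAR_DECL
   initialized from the actual argument, and the RETURN_EXPR becomes an
   assignment to the variable standing for the returned value.  */
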
139/* To Do:
140
141   o In order to make inlining-on-trees work, we pessimized
142     function-local static constants.  In particular, they are now
143     always output, even when not addressed.  Fix this by treating
144     function-local static constants just like global static
145     constants; the back-end already knows not to output them if they
146     are not needed.
147
148   o Provide heuristics to clamp inlining of recursive template
149     calls?  */
150
151
152/* Weights that estimate_num_insns uses to estimate the size of the
153   produced code.  */
154
155eni_weights eni_size_weights;
156
157/* Weights that estimate_num_insns uses to estimate the time necessary
158   to execute the produced code.  */
159
160eni_weights eni_time_weights;
161
162/* Prototypes.  */
163
164static tree declare_return_variable (copy_body_data *, tree, tree, tree,
165				     basic_block);
166static void remap_block (tree *, copy_body_data *);
167static void copy_bind_expr (tree *, int *, copy_body_data *);
168static void declare_inline_vars (tree, tree);
169static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
170static void prepend_lexical_block (tree current_block, tree new_block);
171static tree copy_decl_to_var (tree, copy_body_data *);
172static tree copy_result_decl_to_var (tree, copy_body_data *);
173static tree copy_decl_maybe_to_var (tree, copy_body_data *);
174static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
175static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
176static void insert_init_stmt (copy_body_data *, basic_block, gimple);
177
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this is used for more than that.  */
180
181void
182insert_decl_map (copy_body_data *id, tree key, tree value)
183{
184  id->decl_map->put (key, value);
185
186  /* Always insert an identity map as well.  If we see this same new
187     node again, we won't want to duplicate it a second time.  */
188  if (key != value)
189    id->decl_map->put (value, value);
190}
191
192/* Insert a tree->tree mapping for ID.  This is only used for
193   variables.  */
194
195static void
196insert_debug_decl_map (copy_body_data *id, tree key, tree value)
197{
198  if (!gimple_in_ssa_p (id->src_cfun))
199    return;
200
201  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
202    return;
203
204  if (!target_for_debug_bind (key))
205    return;
206
207  gcc_assert (TREE_CODE (key) == PARM_DECL);
208  gcc_assert (TREE_CODE (value) == VAR_DECL);
209
210  if (!id->debug_map)
211    id->debug_map = new hash_map<tree, tree>;
212
213  id->debug_map->put (key, value);
214}
215
216/* If nonzero, we're remapping the contents of inlined debug
217   statements.  If negative, an error has occurred, such as a
218   reference to a variable that isn't available in the inlined
219   context.  */
220static int processing_debug_stmt = 0;
221
/* Construct a new SSA name for old NAME.  ID is the inline context.  */
223
224static tree
225remap_ssa_name (tree name, copy_body_data *id)
226{
227  tree new_tree, var;
228  tree *n;
229
230  gcc_assert (TREE_CODE (name) == SSA_NAME);
231
232  n = id->decl_map->get (name);
233  if (n)
234    return unshare_expr (*n);
235
236  if (processing_debug_stmt)
237    {
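      /* For the default definition of an as-yet unremapped parameter,
	 create a DEBUG_EXPR_DECL and bind the remapped parameter to it
	 with a debug source bind inserted at the start of the function,
	 so that debug statements can still refer to the incoming
	 argument value.  */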
238      if (SSA_NAME_IS_DEFAULT_DEF (name)
239	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
240	  && id->entry_bb == NULL
241	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
242	{
243	  tree vexpr = make_node (DEBUG_EXPR_DECL);
244	  gimple def_temp;
245	  gimple_stmt_iterator gsi;
246	  tree val = SSA_NAME_VAR (name);
247
248	  n = id->decl_map->get (val);
249	  if (n != NULL)
250	    val = *n;
251	  if (TREE_CODE (val) != PARM_DECL)
252	    {
253	      processing_debug_stmt = -1;
254	      return name;
255	    }
256	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
257	  DECL_ARTIFICIAL (vexpr) = 1;
258	  TREE_TYPE (vexpr) = TREE_TYPE (name);
259	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
260	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
261	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
262	  return vexpr;
263	}
264
265      processing_debug_stmt = -1;
266      return name;
267    }
268
269  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
270  var = SSA_NAME_VAR (name);
271  if (!var
272      || (!SSA_NAME_IS_DEFAULT_DEF (name)
273	  && TREE_CODE (var) == VAR_DECL
274	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
275	  && DECL_ARTIFICIAL (var)
276	  && DECL_IGNORED_P (var)
277	  && !DECL_NAME (var)))
278    {
279      struct ptr_info_def *pi;
280      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
281      if (!var && SSA_NAME_IDENTIFIER (name))
282	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
283      insert_decl_map (id, name, new_tree);
284      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
285	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
286      /* At least IPA points-to info can be directly transferred.  */
287      if (id->src_cfun->gimple_df
288	  && id->src_cfun->gimple_df->ipa_pta
289	  && (pi = SSA_NAME_PTR_INFO (name))
290	  && !pi->pt.anything)
291	{
292	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293	  new_pi->pt = pi->pt;
294	}
295      return new_tree;
296    }
297
  /* Do not set DEF_STMT yet, as the statement has not been copied yet.
     We do that in copy_bb.  */
300  new_tree = remap_decl (var, id);
301
  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from having to introduce a PHI node
     when the return value is only partly initialized.  */
308  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
309      && (!SSA_NAME_VAR (name)
310	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
311	  || !id->transform_return_to_modify))
312    {
313      struct ptr_info_def *pi;
314      new_tree = make_ssa_name (new_tree);
315      insert_decl_map (id, name, new_tree);
316      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
317	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
318      /* At least IPA points-to info can be directly transferred.  */
319      if (id->src_cfun->gimple_df
320	  && id->src_cfun->gimple_df->ipa_pta
321	  && (pi = SSA_NAME_PTR_INFO (name))
322	  && !pi->pt.anything)
323	{
324	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
325	  new_pi->pt = pi->pt;
326	}
327      if (SSA_NAME_IS_DEFAULT_DEF (name))
328	{
	  /* By inlining a function with an uninitialized variable, we might
	     extend its lifetime (the variable might get reused).  This causes
	     an ICE if we end up extending the lifetime of an SSA name across
	     an abnormal edge, and it also increases register pressure.

	     We simply initialize all uninitialized vars to 0, except when
	     we are inlining into the very first BB.  We could avoid this for
	     all BBs that are not inside strongly connected regions of the
	     CFG, but that is expensive to test.  */
338	  if (id->entry_bb
339	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
340	      && (!SSA_NAME_VAR (name)
341		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
342	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
343					     0)->dest
344		  || EDGE_COUNT (id->entry_bb->preds) != 1))
345	    {
346	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
347	      gimple init_stmt;
348	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
349
350	      init_stmt = gimple_build_assign (new_tree, zero);
351	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
352	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
353	    }
354	  else
355	    {
356	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
357	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
358	    }
359	}
360    }
361  else
362    insert_decl_map (id, name, new_tree);
363  return new_tree;
364}
365
366/* Remap DECL during the copying of the BLOCK tree for the function.  */
367
368tree
369remap_decl (tree decl, copy_body_data *id)
370{
371  tree *n;
372
373  /* We only remap local variables in the current function.  */
374
375  /* See if we have remapped this declaration.  */
376
377  n = id->decl_map->get (decl);
378
379  if (!n && processing_debug_stmt)
380    {
381      processing_debug_stmt = -1;
382      return decl;
383    }
384
385  /* If we didn't already have an equivalent for this declaration,
386     create one now.  */
387  if (!n)
388    {
389      /* Make a copy of the variable or label.  */
390      tree t = id->copy_decl (decl, id);
391
392      /* Remember it, so that if we encounter this local entity again
393	 we can reuse this copy.  Do this early because remap_type may
394	 need this decl for TYPE_STUB_DECL.  */
395      insert_decl_map (id, decl, t);
396
397      if (!DECL_P (t))
398	return t;
399
400      /* Remap types, if necessary.  */
401      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
402      if (TREE_CODE (t) == TYPE_DECL)
403        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404
405      /* Remap sizes as necessary.  */
406      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
407      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
408
      /* For fields, do likewise for the offset and qualifier.  */
410      if (TREE_CODE (t) == FIELD_DECL)
411	{
412	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
413	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
414	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
415	}
416
417      return t;
418    }
419
420  if (id->do_not_unshare)
421    return *n;
422  else
423    return unshare_expr (*n);
424}
425
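/* Helper for remap_type.  TYPE is known to need a copy: build one,
   register it in ID's decl map and remap everything the new type
   refers to.  */
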
426static tree
427remap_type_1 (tree type, copy_body_data *id)
428{
429  tree new_tree, t;
430
  /* We do need a copy.  Build and register it now.  If this is a pointer or
432     reference type, remap the designated type and make a new pointer or
433     reference type.  */
434  if (TREE_CODE (type) == POINTER_TYPE)
435    {
436      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
437					 TYPE_MODE (type),
438					 TYPE_REF_CAN_ALIAS_ALL (type));
439      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
440	new_tree = build_type_attribute_qual_variant (new_tree,
441						      TYPE_ATTRIBUTES (type),
442						      TYPE_QUALS (type));
443      insert_decl_map (id, type, new_tree);
444      return new_tree;
445    }
446  else if (TREE_CODE (type) == REFERENCE_TYPE)
447    {
448      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
449					    TYPE_MODE (type),
450					    TYPE_REF_CAN_ALIAS_ALL (type));
451      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452	new_tree = build_type_attribute_qual_variant (new_tree,
453						      TYPE_ATTRIBUTES (type),
454						      TYPE_QUALS (type));
455      insert_decl_map (id, type, new_tree);
456      return new_tree;
457    }
458  else
459    new_tree = copy_node (type);
460
461  insert_decl_map (id, type, new_tree);
462
463  /* This is a new type, not a copy of an old type.  Need to reassociate
464     variants.  We can handle everything except the main variant lazily.  */
465  t = TYPE_MAIN_VARIANT (type);
466  if (type != t)
467    {
468      t = remap_type (t, id);
469      TYPE_MAIN_VARIANT (new_tree) = t;
470      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
471      TYPE_NEXT_VARIANT (t) = new_tree;
472    }
473  else
474    {
475      TYPE_MAIN_VARIANT (new_tree) = new_tree;
476      TYPE_NEXT_VARIANT (new_tree) = NULL;
477    }
478
479  if (TYPE_STUB_DECL (type))
480    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
481
482  /* Lazily create pointer and reference types.  */
483  TYPE_POINTER_TO (new_tree) = NULL;
484  TYPE_REFERENCE_TO (new_tree) = NULL;
485
486  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing between the type and its main variant when possible.  */
488  switch (TREE_CODE (new_tree))
489    {
490    case INTEGER_TYPE:
491    case REAL_TYPE:
492    case FIXED_POINT_TYPE:
493    case ENUMERAL_TYPE:
494    case BOOLEAN_TYPE:
495      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
496	{
497	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
498	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
499
500	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
501	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
502	}
503      else
504	{
505	  t = TYPE_MIN_VALUE (new_tree);
506	  if (t && TREE_CODE (t) != INTEGER_CST)
507	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
508
509	  t = TYPE_MAX_VALUE (new_tree);
510	  if (t && TREE_CODE (t) != INTEGER_CST)
511	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
512	}
513      return new_tree;
514
515    case FUNCTION_TYPE:
516      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
517	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
518	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
519      else
520        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
521      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
523	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
524      else
525        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
526      return new_tree;
527
528    case ARRAY_TYPE:
529      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532      else
533	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
534
535      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
536	{
537	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
538	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
539	}
540      else
541	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
542      break;
543
544    case RECORD_TYPE:
545    case UNION_TYPE:
546    case QUAL_UNION_TYPE:
547      if (TYPE_MAIN_VARIANT (type) != type
548	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
549	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
550      else
551	{
552	  tree f, nf = NULL;
553
554	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
555	    {
556	      t = remap_decl (f, id);
557	      DECL_CONTEXT (t) = new_tree;
558	      DECL_CHAIN (t) = nf;
559	      nf = t;
560	    }
561	  TYPE_FIELDS (new_tree) = nreverse (nf);
562	}
563      break;
564
565    case OFFSET_TYPE:
566    default:
      /* These should not have been considered variably sized.  */
568      gcc_unreachable ();
569    }
570
  /* All variants of the type share the same size, so use the already
     remapped data.  */
572  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
573    {
574      gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
575      gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
576
577      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
578      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
579    }
580  else
581    {
582      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
583      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
584    }
585
586  return new_tree;
587}
588
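/* Remap TYPE using the mapping information in ID.  Only variably modified
   types actually need a copy; everything else is mapped to itself.  */
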
589tree
590remap_type (tree type, copy_body_data *id)
591{
592  tree *node;
593  tree tmp;
594
595  if (type == NULL)
596    return type;
597
598  /* See if we have remapped this type.  */
599  node = id->decl_map->get (type);
600  if (node)
601    return *node;
602
603  /* The type only needs remapping if it's variably modified.  */
604  if (! variably_modified_type_p (type, id->src_fn))
605    {
606      insert_decl_map (id, type, type);
607      return type;
608    }
609
610  id->remapping_type_depth++;
611  tmp = remap_type_1 (type, id);
612  id->remapping_type_depth--;
613
614  return tmp;
615}
616
617/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
618
619static bool
620can_be_nonlocal (tree decl, copy_body_data *id)
621{
  /* We cannot duplicate function decls.  */
623  if (TREE_CODE (decl) == FUNCTION_DECL)
624    return true;
625
626  /* Local static vars must be non-local or we get multiple declaration
627     problems.  */
628  if (TREE_CODE (decl) == VAR_DECL
629      && !auto_var_in_fn_p (decl, id->src_fn))
630    return true;
631
632  return false;
633}
634
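/* Remap the chain of declarations DECLS using the mapping information in
   ID, returning the new chain.  Declarations that stay non-localized are
   pushed onto NONLOCALIZED_LIST instead of being copied.  */
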
635static tree
636remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
637	     copy_body_data *id)
638{
639  tree old_var;
640  tree new_decls = NULL_TREE;
641
642  /* Remap its variables.  */
643  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
644    {
645      tree new_var;
646
647      if (can_be_nonlocal (old_var, id))
648	{
649	  /* We need to add this variable to the local decls as otherwise
650	     nothing else will do so.  */
651	  if (TREE_CODE (old_var) == VAR_DECL
652	      && ! DECL_EXTERNAL (old_var)
653	      && cfun)
654	    add_local_decl (cfun, old_var);
655	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
656	      && !DECL_IGNORED_P (old_var)
657	      && nonlocalized_list)
658	    vec_safe_push (*nonlocalized_list, old_var);
659	  continue;
660	}
661
662      /* Remap the variable.  */
663      new_var = remap_decl (old_var, id);
664
665      /* If we didn't remap this variable, we can't mess with its
666	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
667	 already declared somewhere else, so don't declare it here.  */
668
669      if (new_var == id->retvar)
670	;
671      else if (!new_var)
672        {
673	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
674	      && !DECL_IGNORED_P (old_var)
675	      && nonlocalized_list)
676	    vec_safe_push (*nonlocalized_list, old_var);
677	}
678      else
679	{
680	  gcc_assert (DECL_P (new_var));
681	  DECL_CHAIN (new_var) = new_decls;
682	  new_decls = new_var;
683
684	  /* Also copy value-expressions.  */
685	  if (TREE_CODE (new_var) == VAR_DECL
686	      && DECL_HAS_VALUE_EXPR_P (new_var))
687	    {
688	      tree tem = DECL_VALUE_EXPR (new_var);
689	      bool old_regimplify = id->regimplify;
690	      id->remapping_type_depth++;
691	      walk_tree (&tem, copy_tree_body_r, id, NULL);
692	      id->remapping_type_depth--;
693	      id->regimplify = old_regimplify;
694	      SET_DECL_VALUE_EXPR (new_var, tem);
695	    }
696	}
697    }
698
699  return nreverse (new_decls);
700}
701
702/* Copy the BLOCK to contain remapped versions of the variables
703   therein.  And hook the new block into the block-tree.  */
704
705static void
706remap_block (tree *block, copy_body_data *id)
707{
708  tree old_block;
709  tree new_block;
710
711  /* Make the new block.  */
712  old_block = *block;
713  new_block = make_node (BLOCK);
714  TREE_USED (new_block) = TREE_USED (old_block);
715  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
716  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
717  BLOCK_NONLOCALIZED_VARS (new_block)
718    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
719  *block = new_block;
720
721  /* Remap its variables.  */
722  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
723  					&BLOCK_NONLOCALIZED_VARS (new_block),
724					id);
725
726  if (id->transform_lang_insert_block)
727    id->transform_lang_insert_block (new_block);
728
729  /* Remember the remapped block.  */
730  insert_decl_map (id, old_block, new_block);
731}
732
733/* Copy the whole block tree and root it in id->block.  */
734static tree
735remap_blocks (tree block, copy_body_data *id)
736{
737  tree t;
738  tree new_tree = block;
739
740  if (!block)
741    return NULL;
742
743  remap_block (&new_tree, id);
744  gcc_assert (new_tree != block);
745  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
746    prepend_lexical_block (new_tree, remap_blocks (t, id));
747  /* Blocks are in arbitrary order, but make things slightly prettier and do
748     not swap order when producing a copy.  */
749  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
750  return new_tree;
751}
752
753/* Remap the block tree rooted at BLOCK to nothing.  */
754static void
755remap_blocks_to_null (tree block, copy_body_data *id)
756{
757  tree t;
758  insert_decl_map (id, block, NULL_TREE);
759  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
760    remap_blocks_to_null (t, id);
761}
762
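/* Replace the STATEMENT_LIST at *TP with a copy of its statements,
   recursively copying any nested statement lists.  */
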
763static void
764copy_statement_list (tree *tp)
765{
766  tree_stmt_iterator oi, ni;
767  tree new_tree;
768
769  new_tree = alloc_stmt_list ();
770  ni = tsi_start (new_tree);
771  oi = tsi_start (*tp);
772  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
773  *tp = new_tree;
774
775  for (; !tsi_end_p (oi); tsi_next (&oi))
776    {
777      tree stmt = tsi_stmt (oi);
778      if (TREE_CODE (stmt) == STATEMENT_LIST)
779	/* This copy is not redundant; tsi_link_after will smash this
780	   STATEMENT_LIST into the end of the one we're building, and we
781	   don't want to do that with the original.  */
782	copy_statement_list (&stmt);
783      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
784    }
785}
786
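/* Copy the BIND_EXPR at *TP, remapping its block and the declarations
   it binds using the mapping information in ID.  */
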
787static void
788copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
789{
790  tree block = BIND_EXPR_BLOCK (*tp);
791  /* Copy (and replace) the statement.  */
792  copy_tree_r (tp, walk_subtrees, NULL);
793  if (block)
794    {
795      remap_block (&block, id);
796      BIND_EXPR_BLOCK (*tp) = block;
797    }
798
799  if (BIND_EXPR_VARS (*tp))
800    /* This will remap a lot of the same decls again, but this should be
801       harmless.  */
802    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
803}
804
805
806/* Create a new gimple_seq by remapping all the statements in BODY
807   using the inlining information in ID.  */
808
809static gimple_seq
810remap_gimple_seq (gimple_seq body, copy_body_data *id)
811{
812  gimple_stmt_iterator si;
813  gimple_seq new_body = NULL;
814
815  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
816    {
817      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
818      gimple_seq_add_seq (&new_body, new_stmts);
819    }
820
821  return new_body;
822}
823
824
825/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
826   block using the mapping information in ID.  */
827
828static gimple
829copy_gimple_bind (gbind *stmt, copy_body_data *id)
830{
831  gimple new_bind;
832  tree new_block, new_vars;
833  gimple_seq body, new_body;
834
835  /* Copy the statement.  Note that we purposely don't use copy_stmt
836     here because we need to remap statements as we copy.  */
837  body = gimple_bind_body (stmt);
838  new_body = remap_gimple_seq (body, id);
839
840  new_block = gimple_bind_block (stmt);
841  if (new_block)
842    remap_block (&new_block, id);
843
844  /* This will remap a lot of the same decls again, but this should be
845     harmless.  */
846  new_vars = gimple_bind_vars (stmt);
847  if (new_vars)
848    new_vars = remap_decls (new_vars, NULL, id);
849
850  new_bind = gimple_build_bind (new_vars, new_body, new_block);
851
852  return new_bind;
853}
854
/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
856
857static bool
858is_parm (tree decl)
859{
860  if (TREE_CODE (decl) == SSA_NAME)
861    {
862      decl = SSA_NAME_VAR (decl);
863      if (!decl)
864	return false;
865    }
866
867  return (TREE_CODE (decl) == PARM_DECL);
868}
869
870/* Remap the dependence CLIQUE from the source to the destination function
871   as specified in ID.  */
872
873static unsigned short
874remap_dependence_clique (copy_body_data *id, unsigned short clique)
875{
876  if (clique == 0 || processing_debug_stmt)
877    return 0;
878  if (!id->dependence_map)
879    id->dependence_map
880      = new hash_map<unsigned short, unsigned short, dependence_hasher>;
881  bool existed;
882  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
883  if (!existed)
884    newc = ++cfun->last_clique;
885  return newc;
886}
887
888/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
889   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the child nodes of *TP.  */
892
893static tree
894remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
895{
896  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
897  copy_body_data *id = (copy_body_data *) wi_p->info;
898  tree fn = id->src_fn;
899
900  if (TREE_CODE (*tp) == SSA_NAME)
901    {
902      *tp = remap_ssa_name (*tp, id);
903      *walk_subtrees = 0;
904      return NULL;
905    }
906  else if (auto_var_in_fn_p (*tp, fn))
907    {
908      /* Local variables and labels need to be replaced by equivalent
909	 variables.  We don't want to copy static variables; there's
910	 only one of those, no matter how many times we inline the
911	 containing function.  Similarly for globals from an outer
912	 function.  */
913      tree new_decl;
914
915      /* Remap the declaration.  */
916      new_decl = remap_decl (*tp, id);
917      gcc_assert (new_decl);
918      /* Replace this variable with the copy.  */
919      STRIP_TYPE_NOPS (new_decl);
920      /* ???  The C++ frontend uses void * pointer zero to initialize
921         any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again, the fixup
	 there doesn't trigger.  */
924      if (TREE_CODE (new_decl) == INTEGER_CST
925	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
926	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
927      *tp = new_decl;
928      *walk_subtrees = 0;
929    }
930  else if (TREE_CODE (*tp) == STATEMENT_LIST)
931    gcc_unreachable ();
932  else if (TREE_CODE (*tp) == SAVE_EXPR)
933    gcc_unreachable ();
934  else if (TREE_CODE (*tp) == LABEL_DECL
935	   && (!DECL_CONTEXT (*tp)
936	       || decl_function_context (*tp) == id->src_fn))
937    /* These may need to be remapped for EH handling.  */
938    *tp = remap_decl (*tp, id);
939  else if (TREE_CODE (*tp) == FIELD_DECL)
940    {
941      /* If the enclosing record type is variably_modified_type_p, the field
942	 has already been remapped.  Otherwise, it need not be.  */
943      tree *n = id->decl_map->get (*tp);
944      if (n)
945	*tp = *n;
946      *walk_subtrees = 0;
947    }
948  else if (TYPE_P (*tp))
949    /* Types may need remapping as well.  */
950    *tp = remap_type (*tp, id);
951  else if (CONSTANT_CLASS_P (*tp))
952    {
953      /* If this is a constant, we have to copy the node iff the type
954	 will be remapped.  copy_tree_r will not copy a constant.  */
955      tree new_type = remap_type (TREE_TYPE (*tp), id);
956
957      if (new_type == TREE_TYPE (*tp))
958	*walk_subtrees = 0;
959
960      else if (TREE_CODE (*tp) == INTEGER_CST)
961	*tp = wide_int_to_tree (new_type, *tp);
962      else
963	{
964	  *tp = copy_node (*tp);
965	  TREE_TYPE (*tp) = new_type;
966	}
967    }
968  else
969    {
970      /* Otherwise, just copy the node.  Note that copy_tree_r already
971	 knows not to copy VAR_DECLs, etc., so this is safe.  */
972
973      if (TREE_CODE (*tp) == MEM_REF)
974	{
975	  /* We need to re-canonicalize MEM_REFs from inline substitutions
976	     that can happen when a pointer argument is an ADDR_EXPR.
977	     Recurse here manually to allow that.  */
978	  tree ptr = TREE_OPERAND (*tp, 0);
979	  tree type = remap_type (TREE_TYPE (*tp), id);
980	  tree old = *tp;
981	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
982	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
983	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
984	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
985	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
986	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
987	    {
988	      MR_DEPENDENCE_CLIQUE (*tp)
989	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
990	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
991	    }
992	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
993	     remapped a parameter as the property might be valid only
994	     for the parameter itself.  */
995	  if (TREE_THIS_NOTRAP (old)
996	      && (!is_parm (TREE_OPERAND (old, 0))
997		  || (!id->transform_parameter && is_parm (ptr))))
998	    TREE_THIS_NOTRAP (*tp) = 1;
999	  *walk_subtrees = 0;
1000	  return NULL;
1001	}
1002
1003      /* Here is the "usual case".  Copy this tree node, and then
1004	 tweak some special cases.  */
1005      copy_tree_r (tp, walk_subtrees, NULL);
1006
1007      if (TREE_CODE (*tp) != OMP_CLAUSE)
1008	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1009
1010      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1011	{
1012	  /* The copied TARGET_EXPR has never been expanded, even if the
1013	     original node was expanded already.  */
1014	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1015	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1016	}
1017      else if (TREE_CODE (*tp) == ADDR_EXPR)
1018	{
	  /* Variable substitution need not be simple; see in particular
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
1022	  int invariant = is_gimple_min_invariant (*tp);
1023	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1024	  recompute_tree_invariant_for_addr_expr (*tp);
1025
1026	  /* If this used to be invariant, but is not any longer,
1027	     then regimplification is probably needed.  */
1028	  if (invariant && !is_gimple_min_invariant (*tp))
1029	    id->regimplify = true;
1030
1031	  *walk_subtrees = 0;
1032	}
1033    }
1034
1035  /* Update the TREE_BLOCK for the cloned expr.  */
1036  if (EXPR_P (*tp))
1037    {
1038      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1039      tree old_block = TREE_BLOCK (*tp);
1040      if (old_block)
1041	{
1042	  tree *n;
1043	  n = id->decl_map->get (TREE_BLOCK (*tp));
1044	  if (n)
1045	    new_block = *n;
1046	}
1047      TREE_SET_BLOCK (*tp, new_block);
1048    }
1049
1050  /* Keep iterating.  */
1051  return NULL_TREE;
1052}
1053
1054
1055/* Called from copy_body_id via walk_tree.  DATA is really a
1056   `copy_body_data *'.  */
1057
1058tree
1059copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1060{
1061  copy_body_data *id = (copy_body_data *) data;
1062  tree fn = id->src_fn;
1063  tree new_block;
1064
1065  /* Begin by recognizing trees that we'll completely rewrite for the
1066     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
1068     into an edge).  Further down, we'll handle trees that get
1069     duplicated and/or tweaked.  */
1070
1071  /* When requested, RETURN_EXPRs should be transformed to just the
1072     contained MODIFY_EXPR.  The branch semantics of the return will
1073     be handled elsewhere by manipulating the CFG rather than a statement.  */
1074  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1075    {
1076      tree assignment = TREE_OPERAND (*tp, 0);
1077
1078      /* If we're returning something, just turn that into an
1079	 assignment into the equivalent of the original RESULT_DECL.
1080	 If the "assignment" is just the result decl, the result
1081	 decl has already been set (e.g. a recent "foo (&result_decl,
1082	 ...)"); just toss the entire RETURN_EXPR.  */
1083      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1084	{
1085	  /* Replace the RETURN_EXPR with (a copy of) the
1086	     MODIFY_EXPR hanging underneath.  */
1087	  *tp = copy_node (assignment);
1088	}
1089      else /* Else the RETURN_EXPR returns no value.  */
1090	{
1091	  *tp = NULL;
1092	  return (tree) (void *)1;
1093	}
1094    }
1095  else if (TREE_CODE (*tp) == SSA_NAME)
1096    {
1097      *tp = remap_ssa_name (*tp, id);
1098      *walk_subtrees = 0;
1099      return NULL;
1100    }
1101
1102  /* Local variables and labels need to be replaced by equivalent
1103     variables.  We don't want to copy static variables; there's only
1104     one of those, no matter how many times we inline the containing
1105     function.  Similarly for globals from an outer function.  */
1106  else if (auto_var_in_fn_p (*tp, fn))
1107    {
1108      tree new_decl;
1109
1110      /* Remap the declaration.  */
1111      new_decl = remap_decl (*tp, id);
1112      gcc_assert (new_decl);
1113      /* Replace this variable with the copy.  */
1114      STRIP_TYPE_NOPS (new_decl);
1115      *tp = new_decl;
1116      *walk_subtrees = 0;
1117    }
1118  else if (TREE_CODE (*tp) == STATEMENT_LIST)
1119    copy_statement_list (tp);
1120  else if (TREE_CODE (*tp) == SAVE_EXPR
1121	   || TREE_CODE (*tp) == TARGET_EXPR)
1122    remap_save_expr (tp, id->decl_map, walk_subtrees);
1123  else if (TREE_CODE (*tp) == LABEL_DECL
1124	   && (! DECL_CONTEXT (*tp)
1125	       || decl_function_context (*tp) == id->src_fn))
1126    /* These may need to be remapped for EH handling.  */
1127    *tp = remap_decl (*tp, id);
1128  else if (TREE_CODE (*tp) == BIND_EXPR)
1129    copy_bind_expr (tp, walk_subtrees, id);
1130  /* Types may need remapping as well.  */
1131  else if (TYPE_P (*tp))
1132    *tp = remap_type (*tp, id);
1133
1134  /* If this is a constant, we have to copy the node iff the type will be
1135     remapped.  copy_tree_r will not copy a constant.  */
1136  else if (CONSTANT_CLASS_P (*tp))
1137    {
1138      tree new_type = remap_type (TREE_TYPE (*tp), id);
1139
1140      if (new_type == TREE_TYPE (*tp))
1141	*walk_subtrees = 0;
1142
1143      else if (TREE_CODE (*tp) == INTEGER_CST)
1144	*tp = wide_int_to_tree (new_type, *tp);
1145      else
1146	{
1147	  *tp = copy_node (*tp);
1148	  TREE_TYPE (*tp) = new_type;
1149	}
1150    }
1151
1152  /* Otherwise, just copy the node.  Note that copy_tree_r already
1153     knows not to copy VAR_DECLs, etc., so this is safe.  */
1154  else
1155    {
1156      /* Here we handle trees that are not completely rewritten.
1157	 First we detect some inlining-induced bogosities for
1158	 discarding.  */
1159      if (TREE_CODE (*tp) == MODIFY_EXPR
1160	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1161	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1162	{
1163	  /* Some assignments VAR = VAR; don't generate any rtl code
1164	     and thus don't count as variable modification.  Avoid
1165	     keeping bogosities like 0 = 0.  */
1166	  tree decl = TREE_OPERAND (*tp, 0), value;
1167	  tree *n;
1168
1169	  n = id->decl_map->get (decl);
1170	  if (n)
1171	    {
1172	      value = *n;
1173	      STRIP_TYPE_NOPS (value);
1174	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1175		{
1176		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1177		  return copy_tree_body_r (tp, walk_subtrees, data);
1178		}
1179	    }
1180	}
1181      else if (TREE_CODE (*tp) == INDIRECT_REF)
1182	{
1183	  /* Get rid of *& from inline substitutions that can happen when a
1184	     pointer argument is an ADDR_EXPR.  */
1185	  tree decl = TREE_OPERAND (*tp, 0);
1186	  tree *n = id->decl_map->get (decl);
1187	  if (n)
1188	    {
1189	      /* If we happen to get an ADDR_EXPR in n->value, strip
1190	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about the type they point to.  In this case
1192		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1193		 but we absolutely rely on that.  As fold_indirect_ref
1194	         does other useful transformations, try that first, though.  */
1195	      tree type = TREE_TYPE (*tp);
1196	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1197	      tree old = *tp;
1198	      *tp = gimple_fold_indirect_ref (ptr);
1199	      if (! *tp)
1200	        {
1201		  if (TREE_CODE (ptr) == ADDR_EXPR)
1202		    {
1203		      *tp
1204		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1205		      /* ???  We should either assert here or build
1206			 a VIEW_CONVERT_EXPR instead of blindly leaking
1207			 incompatible types to our IL.  */
1208		      if (! *tp)
1209			*tp = TREE_OPERAND (ptr, 0);
1210		    }
1211	          else
1212		    {
1213	              *tp = build1 (INDIRECT_REF, type, ptr);
1214		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1215		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1216		      TREE_READONLY (*tp) = TREE_READONLY (old);
1217		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1218			 have remapped a parameter as the property might be
1219			 valid only for the parameter itself.  */
1220		      if (TREE_THIS_NOTRAP (old)
1221			  && (!is_parm (TREE_OPERAND (old, 0))
1222			      || (!id->transform_parameter && is_parm (ptr))))
1223		        TREE_THIS_NOTRAP (*tp) = 1;
1224		    }
1225		}
1226	      *walk_subtrees = 0;
1227	      return NULL;
1228	    }
1229	}
1230      else if (TREE_CODE (*tp) == MEM_REF)
1231	{
1232	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1233	     that can happen when a pointer argument is an ADDR_EXPR.
1234	     Recurse here manually to allow that.  */
1235	  tree ptr = TREE_OPERAND (*tp, 0);
1236	  tree type = remap_type (TREE_TYPE (*tp), id);
1237	  tree old = *tp;
1238	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1239	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1240	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1241	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1242	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1243	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1244	    {
1245	      MR_DEPENDENCE_CLIQUE (*tp)
1246		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1247	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1248	    }
1249	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1250	     remapped a parameter as the property might be valid only
1251	     for the parameter itself.  */
1252	  if (TREE_THIS_NOTRAP (old)
1253	      && (!is_parm (TREE_OPERAND (old, 0))
1254		  || (!id->transform_parameter && is_parm (ptr))))
1255	    TREE_THIS_NOTRAP (*tp) = 1;
1256	  *walk_subtrees = 0;
1257	  return NULL;
1258	}
1259
1260      /* Here is the "usual case".  Copy this tree node, and then
1261	 tweak some special cases.  */
1262      copy_tree_r (tp, walk_subtrees, NULL);
1263
      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining, we want EXPRs without a block to appear in the
	 block of the function call if we are not remapping a type.  */
1267      if (EXPR_P (*tp))
1268	{
1269	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1270	  if (TREE_BLOCK (*tp))
1271	    {
1272	      tree *n;
1273	      n = id->decl_map->get (TREE_BLOCK (*tp));
1274	      if (n)
1275		new_block = *n;
1276	    }
1277	  TREE_SET_BLOCK (*tp, new_block);
1278	}
1279
1280      if (TREE_CODE (*tp) != OMP_CLAUSE)
1281	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1282
1283      /* The copied TARGET_EXPR has never been expanded, even if the
1284	 original node was expanded already.  */
1285      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1286	{
1287	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1288	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1289	}
1290
      /* Variable substitution need not be simple; see in particular the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
1294      else if (TREE_CODE (*tp) == ADDR_EXPR)
1295	{
1296	  int invariant = is_gimple_min_invariant (*tp);
1297	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1298
1299	  /* Handle the case where we substituted an INDIRECT_REF
1300	     into the operand of the ADDR_EXPR.  */
1301	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1302	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1303	  else
1304	    recompute_tree_invariant_for_addr_expr (*tp);
1305
1306	  /* If this used to be invariant, but is not any longer,
1307	     then regimplification is probably needed.  */
1308	  if (invariant && !is_gimple_min_invariant (*tp))
1309	    id->regimplify = true;
1310
1311	  *walk_subtrees = 0;
1312	}
1313    }
1314
1315  /* Keep iterating.  */
1316  return NULL_TREE;
1317}
1318
1319/* Helper for remap_gimple_stmt.  Given an EH region number for the
1320   source function, map that to the duplicate EH region number in
1321   the destination function.  */
1322
1323static int
1324remap_eh_region_nr (int old_nr, copy_body_data *id)
1325{
1326  eh_region old_r, new_r;
1327
1328  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1329  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1330
1331  return new_r->index;
1332}
1333
1334/* Similar, but operate on INTEGER_CSTs.  */
1335
1336static tree
1337remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1338{
1339  int old_nr, new_nr;
1340
1341  old_nr = tree_to_shwi (old_t_nr);
1342  new_nr = remap_eh_region_nr (old_nr, id);
1343
1344  return build_int_cst (integer_type_node, new_nr);
1345}
1346
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return a sequence containing the statement copy.  */
1349
1350static gimple_seq
1351remap_gimple_stmt (gimple stmt, copy_body_data *id)
1352{
1353  gimple copy = NULL;
1354  struct walk_stmt_info wi;
1355  bool skip_first = false;
1356  gimple_seq stmts = NULL;
1357
1358  if (is_gimple_debug (stmt)
1359      && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1360    return stmts;
1361
1362  /* Begin by recognizing trees that we'll completely rewrite for the
1363     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
1365     into an edge).  Further down, we'll handle trees that get
1366     duplicated and/or tweaked.  */
1367
1368  /* When requested, GIMPLE_RETURNs should be transformed to just the
1369     contained GIMPLE_ASSIGN.  The branch semantics of the return will
1370     be handled elsewhere by manipulating the CFG rather than the
1371     statement.  */
1372  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1373    {
1374      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1375      tree retbnd = gimple_return_retbnd (stmt);
1376      tree bndslot = id->retbnd;
1377
1378      if (retbnd && bndslot)
1379	{
1380	  gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1381	  memset (&wi, 0, sizeof (wi));
1382	  wi.info = id;
1383	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1384	  gimple_seq_add_stmt (&stmts, bndcopy);
1385	}
1386
1387      /* If we're returning something, just turn that into an
1388	 assignment into the equivalent of the original RESULT_DECL.
1389	 If RETVAL is just the result decl, the result decl has
1390	 already been set (e.g. a recent "foo (&result_decl, ...)");
1391	 just toss the entire GIMPLE_RETURN.  */
1392      if (retval
1393	  && (TREE_CODE (retval) != RESULT_DECL
1394	      && (TREE_CODE (retval) != SSA_NAME
1395		  || ! SSA_NAME_VAR (retval)
1396		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1397        {
1398	  copy = gimple_build_assign (id->do_not_unshare
1399				      ? id->retvar : unshare_expr (id->retvar),
1400				      retval);
1401	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1402	  skip_first = true;
1403
	  /* We need to copy bounds if we return a structure containing
	     pointers from an instrumented function.  */
1406	  if (chkp_function_instrumented_p (id->dst_fn)
1407	      && !bndslot
1408	      && !BOUNDED_P (id->retvar)
1409	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1410	    id->assign_stmts.safe_push (copy);
1411
1412	}
1413      else
1414	return stmts;
1415    }
1416  else if (gimple_has_substatements (stmt))
1417    {
1418      gimple_seq s1, s2;
1419
1420      /* When cloning bodies from the C++ front end, we will be handed bodies
1421	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1422	 have embedded statements.  */
1423      switch (gimple_code (stmt))
1424	{
1425	case GIMPLE_BIND:
1426	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1427	  break;
1428
1429	case GIMPLE_CATCH:
1430	  {
1431	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1432	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1433	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1434	  }
1435	  break;
1436
1437	case GIMPLE_EH_FILTER:
1438	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1439	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1440	  break;
1441
1442	case GIMPLE_TRY:
1443	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1444	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1445	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1446	  break;
1447
1448	case GIMPLE_WITH_CLEANUP_EXPR:
1449	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1450	  copy = gimple_build_wce (s1);
1451	  break;
1452
1453	case GIMPLE_OMP_PARALLEL:
1454	  {
1455	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1456	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1457	    copy = gimple_build_omp_parallel
1458	             (s1,
1459		      gimple_omp_parallel_clauses (omp_par_stmt),
1460		      gimple_omp_parallel_child_fn (omp_par_stmt),
1461		      gimple_omp_parallel_data_arg (omp_par_stmt));
1462	  }
1463	  break;
1464
1465	case GIMPLE_OMP_TASK:
1466	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1467	  copy = gimple_build_omp_task
1468	           (s1,
1469		    gimple_omp_task_clauses (stmt),
1470		    gimple_omp_task_child_fn (stmt),
1471		    gimple_omp_task_data_arg (stmt),
1472		    gimple_omp_task_copy_fn (stmt),
1473		    gimple_omp_task_arg_size (stmt),
1474		    gimple_omp_task_arg_align (stmt));
1475	  break;
1476
1477	case GIMPLE_OMP_FOR:
1478	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1479	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1480	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1481				       gimple_omp_for_clauses (stmt),
1482				       gimple_omp_for_collapse (stmt), s2);
1483	  {
1484	    size_t i;
1485	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1486	      {
1487		gimple_omp_for_set_index (copy, i,
1488					  gimple_omp_for_index (stmt, i));
1489		gimple_omp_for_set_initial (copy, i,
1490					    gimple_omp_for_initial (stmt, i));
1491		gimple_omp_for_set_final (copy, i,
1492					  gimple_omp_for_final (stmt, i));
1493		gimple_omp_for_set_incr (copy, i,
1494					 gimple_omp_for_incr (stmt, i));
1495		gimple_omp_for_set_cond (copy, i,
1496					 gimple_omp_for_cond (stmt, i));
1497	      }
1498	  }
1499	  break;
1500
1501	case GIMPLE_OMP_MASTER:
1502	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1503	  copy = gimple_build_omp_master (s1);
1504	  break;
1505
1506	case GIMPLE_OMP_TASKGROUP:
1507	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1508	  copy = gimple_build_omp_taskgroup (s1);
1509	  break;
1510
1511	case GIMPLE_OMP_ORDERED:
1512	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1513	  copy = gimple_build_omp_ordered (s1);
1514	  break;
1515
1516	case GIMPLE_OMP_SECTION:
1517	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1518	  copy = gimple_build_omp_section (s1);
1519	  break;
1520
1521	case GIMPLE_OMP_SECTIONS:
1522	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523	  copy = gimple_build_omp_sections
1524	           (s1, gimple_omp_sections_clauses (stmt));
1525	  break;
1526
1527	case GIMPLE_OMP_SINGLE:
1528	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529	  copy = gimple_build_omp_single
1530	           (s1, gimple_omp_single_clauses (stmt));
1531	  break;
1532
1533	case GIMPLE_OMP_TARGET:
1534	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1535	  copy = gimple_build_omp_target
1536		   (s1, gimple_omp_target_kind (stmt),
1537		    gimple_omp_target_clauses (stmt));
1538	  break;
1539
1540	case GIMPLE_OMP_TEAMS:
1541	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1542	  copy = gimple_build_omp_teams
1543		   (s1, gimple_omp_teams_clauses (stmt));
1544	  break;
1545
1546	case GIMPLE_OMP_CRITICAL:
1547	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1548	  copy = gimple_build_omp_critical (s1,
1549					    gimple_omp_critical_name (
1550					      as_a <gomp_critical *> (stmt)));
1551	  break;
1552
1553	case GIMPLE_TRANSACTION:
1554	  {
1555	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1556	    gtransaction *new_trans_stmt;
1557	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1558				   id);
1559	    copy = new_trans_stmt
1560	      = gimple_build_transaction (
1561		  s1,
1562		  gimple_transaction_label (old_trans_stmt));
1563	    gimple_transaction_set_subcode (
1564              new_trans_stmt,
1565	      gimple_transaction_subcode (old_trans_stmt));
1566	  }
1567	  break;
1568
1569	default:
1570	  gcc_unreachable ();
1571	}
1572    }
1573  else
1574    {
1575      if (gimple_assign_copy_p (stmt)
1576	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1577	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1578	{
1579	  /* Here we handle statements that are not completely rewritten.
1580	     First we detect some inlining-induced bogosities for
1581	     discarding.  */
1582
1583	  /* Some assignments VAR = VAR; don't generate any rtl code
1584	     and thus don't count as variable modification.  Avoid
1585	     keeping bogosities like 0 = 0.  */
1586	  tree decl = gimple_assign_lhs (stmt), value;
1587	  tree *n;
1588
1589	  n = id->decl_map->get (decl);
1590	  if (n)
1591	    {
1592	      value = *n;
1593	      STRIP_TYPE_NOPS (value);
1594	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1595		return NULL;
1596	    }
1597	}
1598
      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1600	 in a block that we aren't copying during tree_function_versioning,
1601	 just drop the clobber stmt.  */
1602      if (id->blocks_to_copy && gimple_clobber_p (stmt))
1603	{
1604	  tree lhs = gimple_assign_lhs (stmt);
1605	  if (TREE_CODE (lhs) == MEM_REF
1606	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1607	    {
1608	      gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1609	      if (gimple_bb (def_stmt)
1610		  && !bitmap_bit_p (id->blocks_to_copy,
1611				    gimple_bb (def_stmt)->index))
1612		return NULL;
1613	    }
1614	}
1615
1616      if (gimple_debug_bind_p (stmt))
1617	{
1618	  gdebug *copy
1619	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1620				       gimple_debug_bind_get_value (stmt),
1621				       stmt);
1622	  id->debug_stmts.safe_push (copy);
1623	  gimple_seq_add_stmt (&stmts, copy);
1624	  return stmts;
1625	}
1626      if (gimple_debug_source_bind_p (stmt))
1627	{
1628	  gdebug *copy = gimple_build_debug_source_bind
1629	                   (gimple_debug_source_bind_get_var (stmt),
1630			    gimple_debug_source_bind_get_value (stmt),
1631			    stmt);
1632	  id->debug_stmts.safe_push (copy);
1633	  gimple_seq_add_stmt (&stmts, copy);
1634	  return stmts;
1635	}
1636
1637      /* Create a new deep copy of the statement.  */
1638      copy = gimple_copy (stmt);
1639
1640      /* Clear flags that need revisiting.  */
1641      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1642        {
1643	  if (gimple_call_tail_p (call_stmt))
1644	    gimple_call_set_tail (call_stmt, false);
1645	  if (gimple_call_from_thunk_p (call_stmt))
1646	    gimple_call_set_from_thunk (call_stmt, false);
1647	}
1648
1649      /* Remap the region numbers for __builtin_eh_{pointer,filter},
1650	 RESX and EH_DISPATCH.  */
1651      if (id->eh_map)
1652	switch (gimple_code (copy))
1653	  {
1654	  case GIMPLE_CALL:
1655	    {
1656	      tree r, fndecl = gimple_call_fndecl (copy);
1657	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1658		switch (DECL_FUNCTION_CODE (fndecl))
1659		  {
1660		  case BUILT_IN_EH_COPY_VALUES:
1661		    r = gimple_call_arg (copy, 1);
1662		    r = remap_eh_region_tree_nr (r, id);
1663		    gimple_call_set_arg (copy, 1, r);
1664		    /* FALLTHRU */
1665
1666		  case BUILT_IN_EH_POINTER:
1667		  case BUILT_IN_EH_FILTER:
1668		    r = gimple_call_arg (copy, 0);
1669		    r = remap_eh_region_tree_nr (r, id);
1670		    gimple_call_set_arg (copy, 0, r);
1671		    break;
1672
1673		  default:
1674		    break;
1675		  }
1676
1677	      /* Reset alias info if we didn't apply measures to
1678		 keep it valid over inlining by setting DECL_PT_UID.  */
1679	      if (!id->src_cfun->gimple_df
1680		  || !id->src_cfun->gimple_df->ipa_pta)
1681		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1682	    }
1683	    break;
1684
1685	  case GIMPLE_RESX:
1686	    {
1687	      gresx *resx_stmt = as_a <gresx *> (copy);
1688	      int r = gimple_resx_region (resx_stmt);
1689	      r = remap_eh_region_nr (r, id);
1690	      gimple_resx_set_region (resx_stmt, r);
1691	    }
1692	    break;
1693
1694	  case GIMPLE_EH_DISPATCH:
1695	    {
1696	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1697	      int r = gimple_eh_dispatch_region (eh_dispatch);
1698	      r = remap_eh_region_nr (r, id);
1699	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1700	    }
1701	    break;
1702
1703	  default:
1704	    break;
1705	  }
1706    }
1707
1708  /* If STMT has a block defined, map it to the newly constructed
1709     block.  */
1710  if (gimple_block (copy))
1711    {
1712      tree *n;
1713      n = id->decl_map->get (gimple_block (copy));
1714      gcc_assert (n);
1715      gimple_set_block (copy, *n);
1716    }
1717
1718  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1719    {
1720      gimple_seq_add_stmt (&stmts, copy);
1721      return stmts;
1722    }
1723
1724  /* Remap all the operands in COPY.  */
1725  memset (&wi, 0, sizeof (wi));
1726  wi.info = id;
1727  if (skip_first)
1728    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1729  else
1730    walk_gimple_op (copy, remap_gimple_op_r, &wi);
1731
1732  /* Clear the copied virtual operands.  We are not remapping them here
1733     but are going to recreate them from scratch.  */
1734  if (gimple_has_mem_ops (copy))
1735    {
1736      gimple_set_vdef (copy, NULL_TREE);
1737      gimple_set_vuse (copy, NULL_TREE);
1738    }
1739
1740  gimple_seq_add_stmt (&stmts, copy);
1741  return stmts;
1742}
1743
1744
/* Copy a basic block and scale its profile accordingly.  Edges will be
   taken care of later.  */
1747
1748static basic_block
1749copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1750         gcov_type count_scale)
1751{
1752  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1753  basic_block copy_basic_block;
1754  tree decl;
1755  gcov_type freq;
1756  basic_block prev;
1757
1758  /* Search for previous copied basic block.  */
1759  prev = bb->prev_bb;
1760  while (!prev->aux)
1761    prev = prev->prev_bb;
1762
1763  /* create_basic_block() will append every new block to
1764     basic_block_info automatically.  */
1765  copy_basic_block = create_basic_block (NULL, (void *) 0,
1766                                         (basic_block) prev->aux);
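  /* Scale the original block's execution count into the new context.
     In effect apply_scale computes bb->count * count_scale
     / REG_BR_PROB_BASE (assuming the usual fixed-point convention where
     REG_BR_PROB_BASE stands for a scale of 1.0).  */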
1767  copy_basic_block->count = apply_scale (bb->count, count_scale);
1768
  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance; they just drive canonicalize_loop_headers.  */
1771  freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1772
1773  /* We recompute frequencies after inlining, so this is quite safe.  */
1774  if (freq > BB_FREQ_MAX)
1775    freq = BB_FREQ_MAX;
1776  copy_basic_block->frequency = freq;
1777
1778  copy_gsi = gsi_start_bb (copy_basic_block);
1779
1780  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1781    {
1782      gimple_seq stmts;
1783      gimple stmt = gsi_stmt (gsi);
1784      gimple orig_stmt = stmt;
1785      gimple_stmt_iterator stmts_gsi;
1786      bool stmt_added = false;
1787
1788      id->regimplify = false;
1789      stmts = remap_gimple_stmt (stmt, id);
1790
1791      if (gimple_seq_empty_p (stmts))
1792	continue;
1793
1794      seq_gsi = copy_gsi;
1795
1796      for (stmts_gsi = gsi_start (stmts);
1797	   !gsi_end_p (stmts_gsi); )
1798	{
1799	  stmt = gsi_stmt (stmts_gsi);
1800
1801	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1802	  gsi_next (&stmts_gsi);
1803
1804	  if (gimple_nop_p (stmt))
	    continue;
1806
1807	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1808					    orig_stmt);
1809
	  /* With return slot optimization we can end up with
	     non-gimple (foo *)&this->m; fix that here.  */
1812	  if (is_gimple_assign (stmt)
1813	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1814	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1815	    {
1816	      tree new_rhs;
1817	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1818						  gimple_assign_rhs1 (stmt),
1819						  true, NULL, false,
1820						  GSI_CONTINUE_LINKING);
1821	      gimple_assign_set_rhs1 (stmt, new_rhs);
1822	      id->regimplify = false;
1823	    }
1824
1825	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1826
1827	  if (id->regimplify)
1828	    gimple_regimplify_operands (stmt, &seq_gsi);
1829
1830	  stmt_added = true;
1831	}
1832
1833      if (!stmt_added)
1834	continue;
1835
      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
1838      if (gsi_end_p (copy_gsi))
1839	copy_gsi = gsi_start_bb (copy_basic_block);
1840      else
1841	gsi_next (&copy_gsi);
1842
      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements; we
	 need to process all of them.  */
1846      do
1847	{
1848	  tree fn;
1849	  gcall *call_stmt;
1850
1851	  stmt = gsi_stmt (copy_gsi);
1852	  call_stmt = dyn_cast <gcall *> (stmt);
1853	  if (call_stmt
1854	      && gimple_call_va_arg_pack_p (call_stmt)
1855	      && id->call_stmt)
1856	    {
1857	      /* __builtin_va_arg_pack () should be replaced by
1858		 all arguments corresponding to ... in the caller.  */
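	      /* Illustrative example (not from the original sources): if the
		 call being inlined is foo (1, 2, 3) and foo has a single
		 named parameter before the ellipsis, the pack expands to the
		 trailing arguments 2 and 3.  */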
1859	      tree p;
1860	      gcall *new_call;
1861	      vec<tree> argarray;
1862	      size_t nargs = gimple_call_num_args (id->call_stmt);
1863	      size_t n, i, nargs_to_copy;
1864	      bool remove_bounds = false;
1865
1866	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1867		nargs--;
1868
	      /* Bounds should be removed from the arg pack when we
		 handle a non-instrumented call in an instrumented
		 function.  */
1872	      nargs_to_copy = nargs;
1873	      if (gimple_call_with_bounds_p (id->call_stmt)
1874		  && !gimple_call_with_bounds_p (stmt))
1875		{
1876		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1877		       i < gimple_call_num_args (id->call_stmt);
1878		       i++)
1879		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1880		      nargs_to_copy--;
1881		  remove_bounds = true;
1882		}
1883
1884	      /* Create the new array of arguments.  */
1885	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1886	      argarray.create (n);
1887	      argarray.safe_grow_cleared (n);
1888
1889	      /* Copy all the arguments before '...'  */
1890	      memcpy (argarray.address (),
1891		      gimple_call_arg_ptr (call_stmt, 0),
1892		      gimple_call_num_args (call_stmt) * sizeof (tree));
1893
1894	      if (remove_bounds)
1895		{
		  /* Append the rest of the arguments, dropping bounds.  */
		  unsigned cur = gimple_call_num_args (call_stmt);
		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1900		       i < gimple_call_num_args (id->call_stmt);
1901		       i++)
1902		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1903		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1904		  gcc_assert (cur == n);
1905		}
1906	      else
1907		{
1908		  /* Append the arguments passed in '...'  */
1909		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1910			  gimple_call_arg_ptr (id->call_stmt, 0)
1911			  + (gimple_call_num_args (id->call_stmt) - nargs),
1912			  nargs * sizeof (tree));
1913		}
1914
1915	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1916						argarray);
1917
1918	      argarray.release ();
1919
1920	      /* Copy all GIMPLE_CALL flags, location and block, except
1921		 GF_CALL_VA_ARG_PACK.  */
1922	      gimple_call_copy_flags (new_call, call_stmt);
1923	      gimple_call_set_va_arg_pack (new_call, false);
1924	      gimple_set_location (new_call, gimple_location (stmt));
1925	      gimple_set_block (new_call, gimple_block (stmt));
1926	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1927
1928	      gsi_replace (&copy_gsi, new_call, false);
1929	      stmt = new_call;
1930	    }
1931	  else if (call_stmt
1932		   && id->call_stmt
1933		   && (decl = gimple_call_fndecl (stmt))
1934		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1935		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1936	    {
1937	      /* __builtin_va_arg_pack_len () should be replaced by
1938		 the number of anonymous arguments.  */
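	      /* Continuing the illustrative example above, for a call
		 foo (1, 2, 3) with one named parameter the builtin folds
		 to the constant 2.  */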
1939	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
1940	      tree count, p;
1941	      gimple new_stmt;
1942
1943	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1944		nargs--;
1945
1946	      /* For instrumented calls we should ignore bounds.  */
1947	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
1948		   i < gimple_call_num_args (id->call_stmt);
1949		   i++)
1950		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1951		  nargs--;
1952
1953	      count = build_int_cst (integer_type_node, nargs);
1954	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1955	      gsi_replace (&copy_gsi, new_stmt, false);
1956	      stmt = new_stmt;
1957	    }
1958	  else if (call_stmt
1959		   && id->call_stmt
1960		   && gimple_call_internal_p (stmt)
1961		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1962	    {
1963	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
1964	      gsi_remove (&copy_gsi, false);
1965	      continue;
1966	    }
1967
	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just the statements that have
	     changed by some nontrivial substitution, so even statements
	     made foldable indirectly are updated.  If this turns out to be
	     expensive, copy_body can be told to watch for nontrivial
	     changes.  */
1979	  if (id->statements_to_fold)
1980	    id->statements_to_fold->add (stmt);
1981
1982	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1983	     callgraph edges and update or duplicate them.  */
1984	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1985	    {
1986	      struct cgraph_edge *edge;
1987
1988	      switch (id->transform_call_graph_edges)
1989		{
1990		case CB_CGE_DUPLICATE:
1991		  edge = id->src_node->get_edge (orig_stmt);
1992		  if (edge)
1993		    {
1994		      int edge_freq = edge->frequency;
1995		      int new_freq;
1996		      struct cgraph_edge *old_edge = edge;
1997		      edge = edge->clone (id->dst_node, call_stmt,
1998					  gimple_uid (stmt),
1999					  REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2000					  true);
		      /* We could also just rescale the frequency, but
		         doing so would introduce roundoff errors and make
			 the verifier unhappy.  */
2004		      new_freq  = compute_call_stmt_bb_frequency (id->dst_node->decl,
2005								  copy_basic_block);
2006
2007		      /* Speculative calls consist of two edges - direct and indirect.
2008			 Duplicate the whole thing and distribute frequencies accordingly.  */
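		      /* In the code below, each of the two copies receives a
			 share of NEW_FREQ proportional to its original
			 frequency, capped at CGRAPH_FREQ_MAX.  */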
2009		      if (edge->speculative)
2010			{
2011			  struct cgraph_edge *direct, *indirect;
2012			  struct ipa_ref *ref;
2013
2014			  gcc_assert (!edge->indirect_unknown_callee);
2015			  old_edge->speculative_call_info (direct, indirect, ref);
2016			  indirect = indirect->clone (id->dst_node, call_stmt,
2017						      gimple_uid (stmt),
2018						      REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2019						      true);
2020			  if (old_edge->frequency + indirect->frequency)
2021			    {
2022			      edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2023						           (old_edge->frequency + indirect->frequency)),
2024						     CGRAPH_FREQ_MAX);
2025			      indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2026							       (old_edge->frequency + indirect->frequency)),
2027							 CGRAPH_FREQ_MAX);
2028			    }
2029			  id->dst_node->clone_reference (ref, stmt);
2030			}
2031		      else
2032			{
2033			  edge->frequency = new_freq;
2034			  if (dump_file
2035			      && profile_status_for_fn (cfun) != PROFILE_ABSENT
2036			      && (edge_freq > edge->frequency + 10
2037				  || edge_freq < edge->frequency - 10))
2038			    {
2039			      fprintf (dump_file, "Edge frequency estimated by "
				       "cgraph %i diverges from inliner's estimate %i\n",
2041				       edge_freq,
2042				       edge->frequency);
2043			      fprintf (dump_file,
2044				       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2045				       bb->index,
2046				       bb->frequency,
2047				       copy_basic_block->frequency);
2048			    }
2049			}
2050		    }
2051		  break;
2052
2053		case CB_CGE_MOVE_CLONES:
2054		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2055								call_stmt);
2056		  edge = id->dst_node->get_edge (stmt);
2057		  break;
2058
2059		case CB_CGE_MOVE:
2060		  edge = id->dst_node->get_edge (orig_stmt);
2061		  if (edge)
2062		    edge->set_call_stmt (call_stmt);
2063		  break;
2064
2065		default:
2066		  gcc_unreachable ();
2067		}
2068
2069	      /* Constant propagation on argument done during inlining
2070		 may create new direct call.  Produce an edge for it.  */
2071	      if ((!edge
2072		   || (edge->indirect_inlining_edge
2073		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2074		  && id->dst_node->definition
2075		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2076		{
2077		  struct cgraph_node *dest = cgraph_node::get (fn);
2078
		  /* We have a missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments or when we
		     are producing a dead clone (for further cloning).  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
2085		  gcc_assert (!dest->definition
2086			      || dest->address_taken
			      || !id->src_node->definition
2088			      || !id->dst_node->definition);
2089		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2090		    id->dst_node->create_edge_including_clones
2091		      (dest, orig_stmt, call_stmt, bb->count,
2092		       compute_call_stmt_bb_frequency (id->dst_node->decl,
2093		       				       copy_basic_block),
2094		       CIF_ORIGINALLY_INDIRECT_CALL);
2095		  else
2096		    id->dst_node->create_edge (dest, call_stmt,
2097					bb->count,
2098					compute_call_stmt_bb_frequency
2099					  (id->dst_node->decl,
2100					   copy_basic_block))->inline_failed
2101		      = CIF_ORIGINALLY_INDIRECT_CALL;
2102		  if (dump_file)
2103		    {
2104		      fprintf (dump_file, "Created new direct edge to %s\n",
2105			       dest->name ());
2106		    }
2107		}
2108
2109	      notice_special_calls (as_a <gcall *> (stmt));
2110	    }
2111
2112	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2113				      id->eh_map, id->eh_lp_nr);
2114
2115	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2116	    {
2117	      ssa_op_iter i;
2118	      tree def;
2119
2120	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2121		if (TREE_CODE (def) == SSA_NAME)
2122		  SSA_NAME_DEF_STMT (def) = stmt;
2123	    }
2124
2125	  gsi_next (&copy_gsi);
2126	}
2127      while (!gsi_end_p (copy_gsi));
2128
2129      copy_gsi = gsi_last_bb (copy_basic_block);
2130    }
2131
2132  return copy_basic_block;
2133}
2134
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
2153static void
2154update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2155				  bool can_throw, bool nonlocal_goto)
2156{
2157  edge e;
2158  edge_iterator ei;
2159
2160  FOR_EACH_EDGE (e, ei, bb->succs)
2161    if (!e->dest->aux
2162	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2163      {
2164	gphi *phi;
2165	gphi_iterator si;
2166
2167	if (!nonlocal_goto)
2168	  gcc_assert (e->flags & EDGE_EH);
2169
2170	if (!can_throw)
2171	  gcc_assert (!(e->flags & EDGE_EH));
2172
2173	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2174	  {
2175	    edge re;
2176
2177	    phi = si.phi ();
2178
2179	    /* For abnormal goto/call edges the receiver can be the
2180	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2181
2182	    gcc_assert ((e->flags & EDGE_EH)
2183			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2184
2185	    re = find_edge (ret_bb, e->dest);
2186	    gcc_checking_assert (re);
2187	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2188			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2189
2190	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2191		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2192	  }
2193      }
2194}
2195
2196
/* Copy edges from BB into its copy constructed earlier, scaling the profile
   accordingly.  Assume the aux pointers point to the copies of each BB.
   Return true if any debug stmts are left after a statement that must end
   the basic block.  */
2201
2202static bool
2203copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2204		   basic_block abnormal_goto_dest)
2205{
2206  basic_block new_bb = (basic_block) bb->aux;
2207  edge_iterator ei;
2208  edge old_edge;
2209  gimple_stmt_iterator si;
2210  int flags;
2211  bool need_debug_cleanup = false;
2212
2213  /* Use the indices from the original blocks to create edges for the
2214     new ones.  */
2215  FOR_EACH_EDGE (old_edge, ei, bb->succs)
2216    if (!(old_edge->flags & EDGE_EH))
2217      {
2218	edge new_edge;
2219
2220	flags = old_edge->flags;
2221
	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2223	if (old_edge->dest->index == EXIT_BLOCK
2224	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2225	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2226	  flags |= EDGE_FALLTHRU;
2227	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2228	new_edge->count = apply_scale (old_edge->count, count_scale);
2229	new_edge->probability = old_edge->probability;
2230      }
2231
2232  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2233    return false;
2234
2235  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2236    {
2237      gimple copy_stmt;
2238      bool can_throw, nonlocal_goto;
2239
2240      copy_stmt = gsi_stmt (si);
2241      if (!is_gimple_debug (copy_stmt))
2242	update_stmt (copy_stmt);
2243
2244      /* Do this before the possible split_block.  */
2245      gsi_next (&si);
2246
2247      /* If this tree could throw an exception, there are two
2248         cases where we need to add abnormal edge(s): the
2249         tree wasn't in a region and there is a "current
2250         region" in the caller; or the original tree had
2251         EH edges.  In both cases split the block after the tree,
2252         and add abnormal edge(s) as needed; we need both
2253         those from the callee and the caller.
2254         We check whether the copy can throw, because the const
2255         propagation can change an INDIRECT_REF which throws
2256         into a COMPONENT_REF which doesn't.  If the copy
2257         can throw, the original could also throw.  */
2258      can_throw = stmt_can_throw_internal (copy_stmt);
2259      nonlocal_goto
2260	= (stmt_can_make_abnormal_goto (copy_stmt)
2261	   && !computed_goto_p (copy_stmt));
2262
2263      if (can_throw || nonlocal_goto)
2264	{
2265	  if (!gsi_end_p (si))
2266	    {
2267	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2268		gsi_next (&si);
2269	      if (gsi_end_p (si))
2270		need_debug_cleanup = true;
2271	    }
2272	  if (!gsi_end_p (si))
2273	    /* Note that bb's predecessor edges aren't necessarily
2274	       right at this point; split_block doesn't care.  */
2275	    {
2276	      edge e = split_block (new_bb, copy_stmt);
2277
2278	      new_bb = e->dest;
2279	      new_bb->aux = e->src->aux;
2280	      si = gsi_start_bb (new_bb);
2281	    }
2282	}
2283
2284      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2285	make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2286      else if (can_throw)
2287	make_eh_edges (copy_stmt);
2288
2289      /* If the call we inline cannot make abnormal goto do not add
2290         additional abnormal edges but only retain those already present
2291	 in the original function body.  */
2292      if (abnormal_goto_dest == NULL)
2293	nonlocal_goto = false;
2294      if (nonlocal_goto)
2295	{
2296	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2297
2298	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2299	    nonlocal_goto = false;
2300	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2301	     in OpenMP regions which aren't allowed to be left abnormally.
2302	     So, no need to add abnormal edge in that case.  */
2303	  else if (is_gimple_call (copy_stmt)
2304		   && gimple_call_internal_p (copy_stmt)
2305		   && (gimple_call_internal_fn (copy_stmt)
2306		       == IFN_ABNORMAL_DISPATCHER)
2307		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2308	    nonlocal_goto = false;
2309	  else
2310	    make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2311	}
2312
2313      if ((can_throw || nonlocal_goto)
2314	  && gimple_in_ssa_p (cfun))
2315	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2316					  can_throw, nonlocal_goto);
2317    }
2318  return need_debug_cleanup;
2319}
2320
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers link
   the original and newly copied blocks.  */
2325
2326static void
2327copy_phis_for_bb (basic_block bb, copy_body_data *id)
2328{
2329  basic_block const new_bb = (basic_block) bb->aux;
2330  edge_iterator ei;
2331  gphi *phi;
2332  gphi_iterator si;
2333  edge new_edge;
2334  bool inserted = false;
2335
2336  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2337    {
2338      tree res, new_res;
2339      gphi *new_phi;
2340
2341      phi = si.phi ();
2342      res = PHI_RESULT (phi);
2343      new_res = res;
2344      if (!virtual_operand_p (res))
2345	{
2346	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2347	  new_phi = create_phi_node (new_res, new_bb);
2348	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2349	    {
2350	      edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2351	      tree arg;
2352	      tree new_arg;
2353	      edge_iterator ei2;
2354	      location_t locus;
2355
	      /* When doing partial cloning, we allow PHIs on the entry block
		 as long as all the arguments are the same.  Find any incoming
		 edge to pick the argument to copy.  */
2359	      if (!old_edge)
2360		FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2361		  if (!old_edge->src->aux)
2362		    break;
2363
2364	      arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2365	      new_arg = arg;
2366	      walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2367	      gcc_assert (new_arg);
	      /* With return slot optimization we can end up with
	         non-gimple (foo *)&this->m; fix that here.  */
2370	      if (TREE_CODE (new_arg) != SSA_NAME
2371		  && TREE_CODE (new_arg) != FUNCTION_DECL
2372		  && !is_gimple_val (new_arg))
2373		{
2374		  gimple_seq stmts = NULL;
2375		  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2376		  gsi_insert_seq_on_edge (new_edge, stmts);
2377		  inserted = true;
2378		}
2379	      locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2380	      if (LOCATION_BLOCK (locus))
2381		{
2382		  tree *n;
2383		  n = id->decl_map->get (LOCATION_BLOCK (locus));
2384		  gcc_assert (n);
2385		  if (*n)
2386		    locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2387		  else
2388		    locus = LOCATION_LOCUS (locus);
2389		}
2390	      else
2391		locus = LOCATION_LOCUS (locus);
2392
2393	      add_phi_arg (new_phi, new_arg, new_edge, locus);
2394	    }
2395	}
2396    }
2397
2398  /* Commit the delayed edge insertions.  */
2399  if (inserted)
2400    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2401      gsi_commit_one_edge_insert (new_edge, NULL);
2402}
2403
2404
2405/* Wrapper for remap_decl so it can be used as a callback.  */
2406
2407static tree
2408remap_decl_1 (tree decl, void *data)
2409{
2410  return remap_decl (decl, (copy_body_data *) data);
2411}
2412
/* Build a struct function and associated data structures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
   changes cfun to the function of NEW_FNDECL (and current_function_decl too).  */
2416
2417static void
2418initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2419{
2420  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2421  gcov_type count_scale;
2422
2423  if (!DECL_ARGUMENTS (new_fndecl))
2424    DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2425  if (!DECL_RESULT (new_fndecl))
2426    DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2427
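  /* COUNT_SCALE is a fixed-point factor (REG_BR_PROB_BASE standing for 1.0)
     that maps the callee's entry count onto COUNT; roughly
     COUNT / entry_count, as computed by GCOV_COMPUTE_SCALE.  */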
2428  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2429    count_scale
2430        = GCOV_COMPUTE_SCALE (count,
2431                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2432  else
2433    count_scale = REG_BR_PROB_BASE;
2434
2435  /* Register specific tree functions.  */
2436  gimple_register_cfg_hooks ();
2437
2438  /* Get clean struct function.  */
2439  push_struct_function (new_fndecl);
2440
2441  /* We will rebuild these, so just sanity check that they are empty.  */
2442  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2443  gcc_assert (cfun->local_decls == NULL);
2444  gcc_assert (cfun->cfg == NULL);
2445  gcc_assert (cfun->decl == new_fndecl);
2446
2447  /* Copy items we preserve during cloning.  */
2448  cfun->static_chain_decl = src_cfun->static_chain_decl;
2449  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2450  cfun->function_end_locus = src_cfun->function_end_locus;
2451  cfun->curr_properties = src_cfun->curr_properties;
2452  cfun->last_verified = src_cfun->last_verified;
2453  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2454  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2455  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2456  cfun->stdarg = src_cfun->stdarg;
2457  cfun->after_inlining = src_cfun->after_inlining;
2458  cfun->can_throw_non_call_exceptions
2459    = src_cfun->can_throw_non_call_exceptions;
2460  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2461  cfun->returns_struct = src_cfun->returns_struct;
2462  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2463
2464  init_empty_tree_cfg ();
2465
2466  profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2467  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2468    (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2469     REG_BR_PROB_BASE);
2470  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2471    = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2472  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2473    (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2474     REG_BR_PROB_BASE);
2475  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2476    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2477  if (src_cfun->eh)
2478    init_eh_for_function ();
2479
2480  if (src_cfun->gimple_df)
2481    {
2482      init_tree_ssa (cfun);
2483      cfun->gimple_df->in_ssa_p = true;
2484      init_ssa_operands (cfun);
2485    }
2486}
2487
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset the debug stmts' values;
   otherwise keep them.  */
2492
2493static void
2494maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2495{
2496  edge e;
2497  edge_iterator ei;
2498  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2499
2500  if (gsi_end_p (si)
2501      || gsi_one_before_end_p (si)
2502      || !(stmt_can_throw_internal (gsi_stmt (si))
2503	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2504    return;
2505
2506  FOR_EACH_EDGE (e, ei, new_bb->succs)
2507    {
2508      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2509      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2510      while (is_gimple_debug (gsi_stmt (ssi)))
2511	{
2512	  gimple stmt = gsi_stmt (ssi);
2513	  gdebug *new_stmt;
2514	  tree var;
2515	  tree value;
2516
2517	  /* For the last edge move the debug stmts instead of copying
2518	     them.  */
2519	  if (ei_one_before_end_p (ei))
2520	    {
2521	      si = ssi;
2522	      gsi_prev (&ssi);
2523	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2524		gimple_debug_bind_reset_value (stmt);
2525	      gsi_remove (&si, false);
2526	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2527	      continue;
2528	    }
2529
2530	  if (gimple_debug_bind_p (stmt))
2531	    {
2532	      var = gimple_debug_bind_get_var (stmt);
2533	      if (single_pred_p (e->dest))
2534		{
2535		  value = gimple_debug_bind_get_value (stmt);
2536		  value = unshare_expr (value);
2537		}
2538	      else
2539		value = NULL_TREE;
2540	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2541	    }
2542	  else if (gimple_debug_source_bind_p (stmt))
2543	    {
2544	      var = gimple_debug_source_bind_get_var (stmt);
2545	      value = gimple_debug_source_bind_get_value (stmt);
2546	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2547	    }
2548	  else
2549	    gcc_unreachable ();
2550	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2551	  id->debug_stmts.safe_push (new_stmt);
2552	  gsi_prev (&ssi);
2553	}
2554    }
2555}
2556
/* Make a copy of the sub-loops of SRC_PARENT and place them
   as sub-loops of DEST_PARENT.  */
2559
2560static void
2561copy_loops (copy_body_data *id,
2562	    struct loop *dest_parent, struct loop *src_parent)
2563{
2564  struct loop *src_loop = src_parent->inner;
2565  while (src_loop)
2566    {
2567      if (!id->blocks_to_copy
2568	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2569	{
2570	  struct loop *dest_loop = alloc_loop ();
2571
2572	  /* Assign the new loop its header and latch and associate
2573	     those with the new loop.  */
2574	  dest_loop->header = (basic_block)src_loop->header->aux;
2575	  dest_loop->header->loop_father = dest_loop;
2576	  if (src_loop->latch != NULL)
2577	    {
2578	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2579	      dest_loop->latch->loop_father = dest_loop;
2580	    }
2581
2582	  /* Copy loop meta-data.  */
2583	  copy_loop_info (src_loop, dest_loop);
2584
2585	  /* Finally place it into the loop array and the loop tree.  */
2586	  place_new_loop (cfun, dest_loop);
2587	  flow_loop_tree_node_add (dest_parent, dest_loop);
2588
2589	  dest_loop->safelen = src_loop->safelen;
2590	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2591	  if (src_loop->force_vectorize)
2592	    {
2593	      dest_loop->force_vectorize = true;
2594	      cfun->has_force_vectorize_loops = true;
2595	    }
2596	  if (src_loop->simduid)
2597	    {
2598	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2599	      cfun->has_simduid_loops = true;
2600	    }
2601
2602	  /* Recurse.  */
2603	  copy_loops (id, dest_loop, src_loop);
2604	}
2605      src_loop = src_loop->next;
2606    }
2607}
2608
/* Call redirect_call_stmt_to_callee on all calls in BB.  */
2610
2611void
2612redirect_all_calls (copy_body_data * id, basic_block bb)
2613{
2614  gimple_stmt_iterator si;
2615  gimple last = last_stmt (bb);
2616  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2617    {
2618      gimple stmt = gsi_stmt (si);
2619      if (is_gimple_call (stmt))
2620	{
2621	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2622	  if (edge)
2623	    {
2624	      edge->redirect_call_stmt_to_callee ();
2625	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2626		gimple_purge_dead_eh_edges (bb);
2627	    }
2628	}
2629    }
2630}
2631
2632/* Convert estimated frequencies into counts for NODE, scaling COUNT
2633   with each bb's frequency. Used when NODE has a 0-weight entry
2634   but we are about to inline it into a non-zero count call bb.
2635   See the comments for handle_missing_profiles() in predict.c for
2636   when this can happen for COMDATs.  */
2637
2638void
2639freqs_to_counts (struct cgraph_node *node, gcov_type count)
2640{
2641  basic_block bb;
2642  edge_iterator ei;
2643  edge e;
2644  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2645
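  /* In effect this sets bb->count = COUNT * bb->frequency / BB_FREQ_MAX
     and rederives the edge counts from the (unchanged) edge
     probabilities.  */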
2646  FOR_ALL_BB_FN(bb, fn)
2647    {
2648      bb->count = apply_scale (count,
2649                               GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2650      FOR_EACH_EDGE (e, ei, bb->succs)
2651        e->count = apply_probability (e->src->count, e->probability);
2652    }
2653}
2654
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via its CFG and returns the new fndecl.  */
2657
2658static tree
2659copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2660	       basic_block entry_block_map, basic_block exit_block_map,
2661	       basic_block new_entry)
2662{
2663  tree callee_fndecl = id->src_fn;
2664  /* Original cfun for the callee, doesn't change.  */
2665  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2666  struct function *cfun_to_copy;
2667  basic_block bb;
2668  tree new_fndecl = NULL;
2669  bool need_debug_cleanup = false;
2670  gcov_type count_scale;
2671  int last;
2672  int incoming_frequency = 0;
2673  gcov_type incoming_count = 0;
2674
2675  /* This can happen for COMDAT routines that end up with 0 counts
2676     despite being called (see the comments for handle_missing_profiles()
2677     in predict.c as to why). Apply counts to the blocks in the callee
2678     before inlining, using the guessed edge frequencies, so that we don't
2679     end up with a 0-count inline body which can confuse downstream
2680     optimizations such as function splitting.  */
2681  if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2682    {
2683      /* Apply the larger of the call bb count and the total incoming
2684         call edge count to the callee.  */
2685      gcov_type in_count = 0;
2686      struct cgraph_edge *in_edge;
2687      for (in_edge = id->src_node->callers; in_edge;
2688           in_edge = in_edge->next_caller)
2689        in_count += in_edge->count;
2690      freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2691    }
2692
2693  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2694    count_scale
2695        = GCOV_COMPUTE_SCALE (count,
2696                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2697  else
2698    count_scale = REG_BR_PROB_BASE;
2699
2700  /* Register specific tree functions.  */
2701  gimple_register_cfg_hooks ();
2702
  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
     be part of a loop, we must compute the frequency and probability of
     ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
     probabilities of edges incoming from the nonduplicated region.  */
2708  if (new_entry)
2709    {
2710      edge e;
2711      edge_iterator ei;
2712
2713      FOR_EACH_EDGE (e, ei, new_entry->preds)
2714	if (!e->src->aux)
2715	  {
2716	    incoming_frequency += EDGE_FREQUENCY (e);
2717	    incoming_count += e->count;
2718	  }
2719      incoming_count = apply_scale (incoming_count, count_scale);
2720      incoming_frequency
2721	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2722      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2723      ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2724    }
2725
2726  /* Must have a CFG here at this point.  */
2727  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2728	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2729
2730  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2731
2732  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2733  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2734  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2735  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2736
2737  /* Duplicate any exception-handling regions.  */
2738  if (cfun->eh)
2739    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2740				       remap_decl_1, id);
2741
  /* Use aux pointers to map the original blocks to their copies.  */
2743  FOR_EACH_BB_FN (bb, cfun_to_copy)
2744    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2745      {
2746	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2747	bb->aux = new_bb;
2748	new_bb->aux = bb;
2749	new_bb->loop_father = entry_block_map->loop_father;
2750      }
2751
2752  last = last_basic_block_for_fn (cfun);
2753
2754  /* Now that we've duplicated the blocks, duplicate their edges.  */
2755  basic_block abnormal_goto_dest = NULL;
2756  if (id->call_stmt
2757      && stmt_can_make_abnormal_goto (id->call_stmt))
2758    {
2759      gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2760
2761      bb = gimple_bb (id->call_stmt);
2762      gsi_next (&gsi);
2763      if (gsi_end_p (gsi))
2764	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2765    }
2766  FOR_ALL_BB_FN (bb, cfun_to_copy)
2767    if (!id->blocks_to_copy
2768	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2769      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2770					       abnormal_goto_dest);
2771
2772  if (new_entry)
2773    {
2774      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2775      e->probability = REG_BR_PROB_BASE;
2776      e->count = incoming_count;
2777    }
2778
2779  /* Duplicate the loop tree, if available and wanted.  */
2780  if (loops_for_fn (src_cfun) != NULL
2781      && current_loops != NULL)
2782    {
2783      copy_loops (id, entry_block_map->loop_father,
2784		  get_loop (src_cfun, 0));
2785      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2786      loops_state_set (LOOPS_NEED_FIXUP);
2787    }
2788
2789  /* If the loop tree in the source function needed fixup, mark the
2790     destination loop tree for fixup, too.  */
2791  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2792    loops_state_set (LOOPS_NEED_FIXUP);
2793
2794  if (gimple_in_ssa_p (cfun))
2795    FOR_ALL_BB_FN (bb, cfun_to_copy)
2796      if (!id->blocks_to_copy
2797	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2798	copy_phis_for_bb (bb, id);
2799
2800  FOR_ALL_BB_FN (bb, cfun_to_copy)
2801    if (bb->aux)
2802      {
2803	if (need_debug_cleanup
2804	    && bb->index != ENTRY_BLOCK
2805	    && bb->index != EXIT_BLOCK)
2806	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	/* Update call edge destinations.  This cannot be done before loop
	   info is updated, because we may split basic blocks.  */
2809	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2810	    && bb->index != ENTRY_BLOCK
2811	    && bb->index != EXIT_BLOCK)
2812	  redirect_all_calls (id, (basic_block)bb->aux);
2813	((basic_block)bb->aux)->aux = NULL;
2814	bb->aux = NULL;
2815      }
2816
  /* Zero out AUX fields of blocks newly created during EH edge
     insertion.  */
2819  for (; last < last_basic_block_for_fn (cfun); last++)
2820    {
2821      if (need_debug_cleanup)
2822	maybe_move_debug_stmts_to_successors (id,
2823					      BASIC_BLOCK_FOR_FN (cfun, last));
2824      BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
      /* Update call edge destinations.  This cannot be done before loop
	 info is updated, because we may split basic blocks.  */
2827      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2828	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2829    }
2830  entry_block_map->aux = NULL;
2831  exit_block_map->aux = NULL;
2832
2833  if (id->eh_map)
2834    {
2835      delete id->eh_map;
2836      id->eh_map = NULL;
2837    }
2838  if (id->dependence_map)
2839    {
2840      delete id->dependence_map;
2841      id->dependence_map = NULL;
2842    }
2843
2844  return new_fndecl;
2845}
2846
2847/* Copy the debug STMT using ID.  We deal with these statements in a
2848   special way: if any variable in their VALUE expression wasn't
2849   remapped yet, we won't remap it, because that would get decl uids
2850   out of sync, causing codegen differences between -g and -g0.  If
2851   this arises, we drop the VALUE expression altogether.  */
2852
2853static void
2854copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2855{
2856  tree t, *n;
2857  struct walk_stmt_info wi;
2858
2859  if (gimple_block (stmt))
2860    {
2861      n = id->decl_map->get (gimple_block (stmt));
2862      gimple_set_block (stmt, n ? *n : id->block);
2863    }
2864
2865  /* Remap all the operands in COPY.  */
2866  memset (&wi, 0, sizeof (wi));
2867  wi.info = id;
2868
2869  processing_debug_stmt = 1;
2870
2871  if (gimple_debug_source_bind_p (stmt))
2872    t = gimple_debug_source_bind_get_var (stmt);
2873  else
2874    t = gimple_debug_bind_get_var (stmt);
2875
2876  if (TREE_CODE (t) == PARM_DECL && id->debug_map
2877      && (n = id->debug_map->get (t)))
2878    {
2879      gcc_assert (TREE_CODE (*n) == VAR_DECL);
2880      t = *n;
2881    }
2882  else if (TREE_CODE (t) == VAR_DECL
2883	   && !is_global_var (t)
2884	   && !id->decl_map->get (t))
2885    /* T is a non-localized variable.  */;
2886  else
2887    walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2888
2889  if (gimple_debug_bind_p (stmt))
2890    {
2891      gimple_debug_bind_set_var (stmt, t);
2892
2893      if (gimple_debug_bind_has_value_p (stmt))
2894	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2895		   remap_gimple_op_r, &wi, NULL);
2896
2897      /* Punt if any decl couldn't be remapped.  */
2898      if (processing_debug_stmt < 0)
2899	gimple_debug_bind_reset_value (stmt);
2900    }
2901  else if (gimple_debug_source_bind_p (stmt))
2902    {
2903      gimple_debug_source_bind_set_var (stmt, t);
      /* When inlining and the source bind refers to one of the optimized
	 away parameters, change the source bind into a normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
2908      t = gimple_debug_source_bind_get_value (stmt);
2909      if (t != NULL_TREE
2910	  && TREE_CODE (t) == PARM_DECL
2911	  && id->call_stmt)
2912	{
2913	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2914	  unsigned int i;
2915	  if (debug_args != NULL)
2916	    {
2917	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2918		if ((**debug_args)[i] == DECL_ORIGIN (t)
2919		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2920		  {
2921		    t = (**debug_args)[i + 1];
2922		    stmt->subcode = GIMPLE_DEBUG_BIND;
2923		    gimple_debug_bind_set_value (stmt, t);
2924		    break;
2925		  }
2926	    }
2927	}
2928      if (gimple_debug_source_bind_p (stmt))
2929	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2930		   remap_gimple_op_r, &wi, NULL);
2931    }
2932
2933  processing_debug_stmt = 0;
2934
2935  update_stmt (stmt);
2936}
2937
2938/* Process deferred debug stmts.  In order to give values better odds
2939   of being successfully remapped, we delay the processing of debug
2940   stmts until all other stmts that might require remapping are
2941   processed.  */
2942
2943static void
2944copy_debug_stmts (copy_body_data *id)
2945{
2946  size_t i;
2947  gdebug *stmt;
2948
2949  if (!id->debug_stmts.exists ())
2950    return;
2951
2952  FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2953    copy_debug_stmt (stmt, id);
2954
2955  id->debug_stmts.release ();
2956}
2957
2958/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2959   another function.  */
2960
2961static tree
2962copy_tree_body (copy_body_data *id)
2963{
2964  tree fndecl = id->src_fn;
2965  tree body = DECL_SAVED_TREE (fndecl);
2966
2967  walk_tree (&body, copy_tree_body_r, id, NULL);
2968
2969  return body;
2970}
2971
2972/* Make a copy of the body of FN so that it can be inserted inline in
2973   another function.  */
2974
2975static tree
2976copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2977	   basic_block entry_block_map, basic_block exit_block_map,
2978	   basic_block new_entry)
2979{
2980  tree fndecl = id->src_fn;
2981  tree body;
2982
2983  /* If this body has a CFG, walk CFG and copy.  */
2984  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2985  body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2986			new_entry);
2987  copy_debug_stmts (id);
2988
2989  return body;
2990}
2991
2992/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2993   defined in function FN, or of a data member thereof.  */
2994
2995static bool
2996self_inlining_addr_expr (tree value, tree fn)
2997{
2998  tree var;
2999
3000  if (TREE_CODE (value) != ADDR_EXPR)
3001    return false;
3002
3003  var = get_base_address (TREE_OPERAND (value, 0));
3004
3005  return var && auto_var_in_fn_p (var, fn);
3006}
3007
3008/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3009   lexical block and line number information from base_stmt, if given,
3010   or from the last stmt of the block otherwise.  */
3011
3012static gimple
3013insert_init_debug_bind (copy_body_data *id,
3014			basic_block bb, tree var, tree value,
3015			gimple base_stmt)
3016{
3017  gimple note;
3018  gimple_stmt_iterator gsi;
3019  tree tracked_var;
3020
3021  if (!gimple_in_ssa_p (id->src_cfun))
3022    return NULL;
3023
3024  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3025    return NULL;
3026
3027  tracked_var = target_for_debug_bind (var);
3028  if (!tracked_var)
3029    return NULL;
3030
3031  if (bb)
3032    {
3033      gsi = gsi_last_bb (bb);
3034      if (!base_stmt && !gsi_end_p (gsi))
3035	base_stmt = gsi_stmt (gsi);
3036    }
3037
3038  note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3039
3040  if (bb)
3041    {
3042      if (!gsi_end_p (gsi))
3043	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3044      else
3045	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3046    }
3047
3048  return note;
3049}
3050
3051static void
3052insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3053{
3054  /* If VAR represents a zero-sized variable, it's possible that the
3055     assignment statement may result in no gimple statements.  */
3056  if (init_stmt)
3057    {
3058      gimple_stmt_iterator si = gsi_last_bb (bb);
3059
3060      /* We can end up with init statements that store to a non-register
3061         from a rhs with a conversion.  Handle that here by forcing the
3062	 rhs into a temporary.  gimple_regimplify_operands is not
3063	 prepared to do this for us.  */
3064      if (!is_gimple_debug (init_stmt)
3065	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3066	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3067	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3068	{
3069	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3070			     gimple_expr_type (init_stmt),
3071			     gimple_assign_rhs1 (init_stmt));
3072	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3073					  GSI_NEW_STMT);
3074	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3075	  gimple_assign_set_rhs1 (init_stmt, rhs);
3076	}
3077      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3078      gimple_regimplify_operands (init_stmt, &si);
3079
3080      if (!is_gimple_debug (init_stmt))
3081	{
3082	  tree def = gimple_assign_lhs (init_stmt);
3083	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3084	}
3085    }
3086}
3087
/* Initialize parameter P with VALUE.  If needed, produce an init statement
   at the end of BB.  When BB is NULL, we return the init statement to be
   output later.  */
3091static gimple
3092setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3093		     basic_block bb, tree *vars)
3094{
3095  gimple init_stmt = NULL;
3096  tree var;
3097  tree rhs = value;
3098  tree def = (gimple_in_ssa_p (cfun)
3099	      ? ssa_default_def (id->src_cfun, p) : NULL);
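  /* DEF, if non-NULL, is the default-definition SSA name of P in the
     callee, i.e. the SSA name that carries the incoming value of the
     parameter.  */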
3100
3101  if (value
3102      && value != error_mark_node
3103      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3104    {
3105      /* If we can match up types by promotion/demotion do so.  */
3106      if (fold_convertible_p (TREE_TYPE (p), value))
3107	rhs = fold_convert (TREE_TYPE (p), value);
3108      else
3109	{
	  /* ???  For valid programs we should not end up here.
	     Still, if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
3114	  if (!is_gimple_reg_type (TREE_TYPE (value))
3115	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3116	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3117	  else
3118	    rhs = build_zero_cst (TREE_TYPE (p));
3119	}
3120    }
3121
3122  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3123     here since the type of this decl must be visible to the calling
3124     function.  */
3125  var = copy_decl_to_var (p, id);
3126
3127  /* Declare this new variable.  */
3128  DECL_CHAIN (var) = *vars;
3129  *vars = var;
3130
3131  /* Make gimplifier happy about this variable.  */
3132  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3133
  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
3138  if (TREE_READONLY (p)
3139      && !TREE_ADDRESSABLE (p)
3140      && value && !TREE_SIDE_EFFECTS (value)
3141      && !def)
3142    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here, as
	 we will constant propagate in the DOM1 pass anyway.  */
3147      if (is_gimple_min_invariant (value)
3148	  && useless_type_conversion_p (TREE_TYPE (p),
3149						 TREE_TYPE (value))
3150	  /* We have to be very careful about ADDR_EXPR.  Make sure
3151	     the base variable isn't a local variable of the inlined
3152	     function, e.g., when doing recursive inlining, direct or
3153	     mutually-recursive or whatever, which is why we don't
3154	     just test whether fn == current_function_decl.  */
3155	  && ! self_inlining_addr_expr (value, fn))
3156	{
3157	  insert_decl_map (id, p, value);
3158	  insert_debug_decl_map (id, p, var);
3159	  return insert_init_debug_bind (id, bb, var, value, NULL);
3160	}
3161    }
3162
3163  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3164     that way, when the PARM_DECL is encountered, it will be
3165     automatically replaced by the VAR_DECL.  */
3166  insert_decl_map (id, p, var);
3167
3168  /* Even if P was TREE_READONLY, the new VAR should not be.
3169     In the original code, we would have constructed a
3170     temporary, and then the function body would have never
3171     changed the value of P.  However, now, we will be
3172     constructing VAR directly.  The constructor body may
3173     change its value multiple times as it is being
3174     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
3177  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3178    TREE_READONLY (var) = 0;
3179
  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs requiring
     a constant argument to be visible in the inlined function body.  */
3190  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3191      && (optimize
3192          || (TREE_READONLY (p)
3193	      && is_gimple_min_invariant (rhs)))
3194      && (TREE_CODE (rhs) == SSA_NAME
3195	  || is_gimple_min_invariant (rhs))
3196      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3197    {
3198      insert_decl_map (id, def, rhs);
3199      return insert_init_debug_bind (id, bb, var, rhs, NULL);
3200    }
3201
  /* If the value of the argument is never used, don't bother
     initializing it.  */
3204  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3205    {
3206      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3207      return insert_init_debug_bind (id, bb, var, rhs, NULL);
3208    }
3209
3210  /* Initialize this VAR_DECL from the equivalent argument.  Convert
3211     the argument to the proper type in case it was promoted.  */
3212  if (value)
3213    {
3214      if (rhs == error_mark_node)
3215	{
3216	  insert_decl_map (id, p, var);
3217	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3218	}
3219
3220      STRIP_USELESS_TYPE_CONVERSION (rhs);
3221
      /* If we are in SSA form, properly remap the default definition
         or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
3225      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3226	{
3227	  if (def)
3228	    {
3229	      def = remap_ssa_name (def, id);
3230	      init_stmt = gimple_build_assign (def, rhs);
3231	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3232	      set_ssa_default_def (cfun, var, NULL);
3233	    }
3234	  else if (!optimize)
3235	    {
3236	      def = make_ssa_name (var);
3237	      init_stmt = gimple_build_assign (def, rhs);
3238	    }
3239	}
3240      else
3241        init_stmt = gimple_build_assign (var, rhs);
3242
3243      if (bb && init_stmt)
3244        insert_init_stmt (id, bb, init_stmt);
3245    }
3246  return init_stmt;
3247}
3248
3249/* Generate code to initialize the parameters of the function at the
3250   top of the stack in ID from the GIMPLE_CALL STMT.  */
3251
3252static void
3253initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3254			       tree fn, basic_block bb)
3255{
3256  tree parms;
3257  size_t i;
3258  tree p;
3259  tree vars = NULL_TREE;
3260  tree static_chain = gimple_call_chain (stmt);
3261
3262  /* Figure out what the parameters are.  */
3263  parms = DECL_ARGUMENTS (fn);
3264
3265  /* Loop through the parameter declarations, replacing each with an
3266     equivalent VAR_DECL, appropriately initialized.  */
3267  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3268    {
3269      tree val;
3270      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3271      setup_one_parameter (id, p, val, fn, bb, &vars);
3272    }
3273  /* After remapping parameters remap their types.  This has to be done
3274     in a second loop over all parameters to appropriately remap
3275     variable sized arrays when the size is specified in a
3276     parameter following the array.  */
3277  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3278    {
3279      tree *varp = id->decl_map->get (p);
3280      if (varp
3281	  && TREE_CODE (*varp) == VAR_DECL)
3282	{
3283	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3284		      ? ssa_default_def (id->src_cfun, p) : NULL);
3285	  tree var = *varp;
3286	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3287	  /* Also remap the default definition if it was remapped
3288	     to the default definition of the parameter replacement
3289	     by the parameter setup.  */
3290	  if (def)
3291	    {
3292	      tree *defp = id->decl_map->get (def);
3293	      if (defp
3294		  && TREE_CODE (*defp) == SSA_NAME
3295		  && SSA_NAME_VAR (*defp) == var)
3296		TREE_TYPE (*defp) = TREE_TYPE (var);
3297	    }
3298	}
3299    }
3300
3301  /* Initialize the static chain.  */
3302  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3303  gcc_assert (fn != current_function_decl);
3304  if (p)
3305    {
3306      /* No static chain?  Seems like a bug in tree-nested.c.  */
3307      gcc_assert (static_chain);
3308
3309      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3310    }
3311
3312  declare_inline_vars (id->block, vars);
3313}
3314
3315
3316	/* Declare a return variable to replace the RESULT_DECL for the
3317	   function we are calling.  A new VAR_DECL is created (or the
3318	   caller's own destination reused) and registered as the
3319	   replacement for the RESULT_DECL in the copied body.
3320
3321	   RETURN_SLOT, if non-null, is the place where to store the result.  It
3322   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3323   was the LHS of the MODIFY_EXPR to which this call is the RHS.
3324
3325   RETURN_BOUNDS holds a destination for returned bounds.
3326
3327   The return value is a (possibly null) value that holds the result
3328   as seen by the caller.  */
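/* As an illustration (the names below are made up): for a call
   'x = foo ();', MODIFY_DEST is 'x', and when it is safe to reuse it
   the callee's RESULT_DECL is mapped straight to 'x', so copied return
   statements become assignments to 'x'.  For 's = foo ();' using the
   return slot optimization, RETURN_SLOT is 's' and stores to the
   RESULT_DECL in the copied body go directly into 's'.  */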
3329
3330static tree
3331declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3332			 tree return_bounds, basic_block entry_bb)
3333{
3334  tree callee = id->src_fn;
3335  tree result = DECL_RESULT (callee);
3336  tree callee_type = TREE_TYPE (result);
3337  tree caller_type;
3338  tree var, use;
3339
3340  /* Handle type-mismatches in the function declaration return type
3341     vs. the call expression.  */
3342  if (modify_dest)
3343    caller_type = TREE_TYPE (modify_dest);
3344  else
3345    caller_type = TREE_TYPE (TREE_TYPE (callee));
3346
3347  /* We don't need to do anything for functions that don't return anything.  */
3348  if (VOID_TYPE_P (callee_type))
3349    return NULL_TREE;
3350
3351  /* If there was a return slot, then the return value is the
3352     dereferenced address of that object.  */
3353  if (return_slot)
3354    {
3355      /* The front end shouldn't have used both return_slot and
3356	 a modify expression.  */
3357      gcc_assert (!modify_dest);
3358      if (DECL_BY_REFERENCE (result))
3359	{
3360	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3361	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3362
3363	  /* We are going to construct *&return_slot and we can't do that
3364	     for variables that are not considered addressable.
3365
3366	     FIXME: This check can possibly trigger, because values returned
3367	     via the return slot optimization are not considered by alias
3368	     analysis to have their address taken.  */
3369	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3370	  var = return_slot_addr;
3371	}
3372      else
3373	{
3374	  var = return_slot;
3375	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3376	  if (TREE_ADDRESSABLE (result))
3377	    mark_addressable (var);
3378	}
3379      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3380           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3381	  && !DECL_GIMPLE_REG_P (result)
3382	  && DECL_P (var))
3383	DECL_GIMPLE_REG_P (var) = 0;
3384      use = NULL;
3385      goto done;
3386    }
3387
3388  /* All types requiring non-trivial constructors should have been handled.  */
3389  gcc_assert (!TREE_ADDRESSABLE (callee_type));
3390
3391  /* Attempt to avoid creating a new temporary variable.  */
3392  if (modify_dest
3393      && TREE_CODE (modify_dest) != SSA_NAME)
3394    {
3395      bool use_it = false;
3396
3397      /* We can't use MODIFY_DEST if there's type promotion involved.  */
3398      if (!useless_type_conversion_p (callee_type, caller_type))
3399	use_it = false;
3400
3401      /* ??? If we're assigning to a variable sized type, then we must
3402	 reuse the destination variable, because we've no good way to
3403	 create variable sized temporaries at this point.  */
3404      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3405	use_it = true;
3406
3407      /* If the callee cannot possibly modify MODIFY_DEST, then we can
3408	 reuse it as the result of the call directly.  Don't do this if
3409	 it would promote MODIFY_DEST to addressable.  */
3410      else if (TREE_ADDRESSABLE (result))
3411	use_it = false;
3412      else
3413	{
3414	  tree base_m = get_base_address (modify_dest);
3415
3416	  /* If the base isn't a decl, then it's a pointer, and we don't
3417	     know where that's going to go.  */
3418	  if (!DECL_P (base_m))
3419	    use_it = false;
3420	  else if (is_global_var (base_m))
3421	    use_it = false;
3422	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3423		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3424		   && !DECL_GIMPLE_REG_P (result)
3425		   && DECL_GIMPLE_REG_P (base_m))
3426	    use_it = false;
3427	  else if (!TREE_ADDRESSABLE (base_m))
3428	    use_it = true;
3429	}
3430
3431      if (use_it)
3432	{
3433	  var = modify_dest;
3434	  use = NULL;
3435	  goto done;
3436	}
3437    }
3438
3439  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3440
3441  var = copy_result_decl_to_var (result, id);
3442  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3443
3444  /* Do not have the rest of GCC warn about this variable as it should
3445     not be visible to the user.  */
3446  TREE_NO_WARNING (var) = 1;
3447
3448  declare_inline_vars (id->block, var);
3449
3450  /* Build the use expr.  If the return type of the function was
3451     promoted, convert it back to the expected type.  */
3452  use = var;
3453  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3454    {
3455      /* If we can match up types by promotion/demotion do so.  */
3456      if (fold_convertible_p (caller_type, var))
3457	use = fold_convert (caller_type, var);
3458      else
3459	{
3460	  /* ???  For valid programs we should not end up here.
3461	     Still if we end up with truly mismatched types here, fall back
3462	     to using a MEM_REF to not leak invalid GIMPLE to the following
3463	     passes.  */
3464	  /* Prevent var from being written into SSA form.  */
3465	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3466	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3467	    DECL_GIMPLE_REG_P (var) = false;
3468	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3469	    TREE_ADDRESSABLE (var) = true;
3470	  use = fold_build2 (MEM_REF, caller_type,
3471			     build_fold_addr_expr (var),
3472			     build_int_cst (ptr_type_node, 0));
3473	}
3474    }
3475
3476  STRIP_USELESS_TYPE_CONVERSION (use);
3477
3478  if (DECL_BY_REFERENCE (result))
3479    {
3480      TREE_ADDRESSABLE (var) = 1;
3481      var = build_fold_addr_expr (var);
3482    }
3483
3484 done:
3485  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3486     way, when the RESULT_DECL is encountered, it will be
3487     automatically replaced by the VAR_DECL.
3488
3489     When returning by reference, ensure that RESULT_DECL remaps to
3490     gimple_val.  */
3491  if (DECL_BY_REFERENCE (result)
3492      && !is_gimple_val (var))
3493    {
3494      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3495      insert_decl_map (id, result, temp);
3496      /* When RESULT_DECL is in SSA form, we need to remap and initialize
3497	 its default_def SSA_NAME.  */
3498      if (gimple_in_ssa_p (id->src_cfun)
3499	  && is_gimple_reg (result))
3500	{
3501	  temp = make_ssa_name (temp);
3502	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3503	}
3504      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3505    }
3506  else
3507    insert_decl_map (id, result, var);
3508
3509  /* Remember this so we can ignore it in remap_decls.  */
3510  id->retvar = var;
3511
3512	  /* If returned bounds are used, then make a var for them.  */
3513  if (return_bounds)
3514  {
3515    tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3516    DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3517    TREE_NO_WARNING (bndtemp) = 1;
3518    declare_inline_vars (id->block, bndtemp);
3519
3520    id->retbnd = bndtemp;
3521    insert_init_stmt (id, entry_bb,
3522		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3523  }
3524
3525  return use;
3526}
3527
3528/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
3529   to a local label.  */
3530
3531static tree
3532has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3533{
3534  tree node = *nodep;
3535  tree fn = (tree) fnp;
3536
3537  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3538    return node;
3539
3540  if (TYPE_P (node))
3541    *walk_subtrees = 0;
3542
3543  return NULL_TREE;
3544}
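
/* For example (illustrative), a function such as

     void f (void) { lab: ; static void *p = &&lab; ... }

   saves the address of the local label 'lab' in the initializer of a
   static variable; copy_forbidden below walks DECL_INITIAL of 'p' with
   the callback above, finds the LABEL_DECL and refuses the copy.  */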
3545
3546	/* Determine if the function can be copied.  If so, return NULL.  If
3547	   not, return a string describing the reason for failure.  */
3548
3549const char *
3550copy_forbidden (struct function *fun, tree fndecl)
3551{
3552  const char *reason = fun->cannot_be_copied_reason;
3553  tree decl;
3554  unsigned ix;
3555
3556  /* Only examine the function once.  */
3557  if (fun->cannot_be_copied_set)
3558    return reason;
3559
3560  /* We cannot copy a function that receives a non-local goto
3561     because we cannot remap the destination label used in the
3562     function that is performing the non-local goto.  */
3563  /* ??? Actually, this should be possible, if we work at it.
3564     No doubt there's just a handful of places that simply
3565     assume it doesn't happen and don't substitute properly.  */
3566  if (fun->has_nonlocal_label)
3567    {
3568      reason = G_("function %q+F can never be copied "
3569		  "because it receives a non-local goto");
3570      goto fail;
3571    }
3572
3573  FOR_EACH_LOCAL_DECL (fun, ix, decl)
3574    if (TREE_CODE (decl) == VAR_DECL
3575	&& TREE_STATIC (decl)
3576	&& !DECL_EXTERNAL (decl)
3577	&& DECL_INITIAL (decl)
3578	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
3579					 has_label_address_in_static_1,
3580					 fndecl))
3581      {
3582	reason = G_("function %q+F can never be copied because it saves "
3583		    "address of local label in a static variable");
3584	goto fail;
3585      }
3586
3587 fail:
3588  fun->cannot_be_copied_reason = reason;
3589  fun->cannot_be_copied_set = true;
3590  return reason;
3591}
3592
3593
3594static const char *inline_forbidden_reason;
3595
3596/* A callback for walk_gimple_seq to handle statements.  Returns non-null
3597	   iff a function cannot be inlined.  Also sets the reason why.  */
3598
3599static tree
3600inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3601			 struct walk_stmt_info *wip)
3602{
3603  tree fn = (tree) wip->info;
3604  tree t;
3605  gimple stmt = gsi_stmt (*gsi);
3606
3607  switch (gimple_code (stmt))
3608    {
3609    case GIMPLE_CALL:
3610	      /* Refuse to inline an alloca call unless the user explicitly forced it,
3611	 as this may drastically change the program's memory overhead when the
3612	 function using alloca is called in a loop.  For the GCC benchmark in
3613	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3614	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3615	 VLA objects, as those can't cause unbounded growth (they're always
3616	 wrapped inside stack_save/stack_restore regions).  */
3617      if (gimple_alloca_call_p (stmt)
3618	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3619	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3620	{
3621	  inline_forbidden_reason
3622	    = G_("function %q+F can never be inlined because it uses "
3623		 "alloca (override using the always_inline attribute)");
3624	  *handled_ops_p = true;
3625	  return fn;
3626	}
3627
3628      t = gimple_call_fndecl (stmt);
3629      if (t == NULL_TREE)
3630	break;
3631
3632      /* We cannot inline functions that call setjmp.  */
3633      if (setjmp_call_p (t))
3634	{
3635	  inline_forbidden_reason
3636	    = G_("function %q+F can never be inlined because it uses setjmp");
3637	  *handled_ops_p = true;
3638	  return t;
3639	}
3640
3641      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3642	switch (DECL_FUNCTION_CODE (t))
3643	  {
3644	    /* We cannot inline functions that take a variable number of
3645	       arguments.  */
3646	  case BUILT_IN_VA_START:
3647	  case BUILT_IN_NEXT_ARG:
3648	  case BUILT_IN_VA_END:
3649	    inline_forbidden_reason
3650	      = G_("function %q+F can never be inlined because it "
3651		   "uses variable argument lists");
3652	    *handled_ops_p = true;
3653	    return t;
3654
3655	  case BUILT_IN_LONGJMP:
3656	    /* We can't inline functions that call __builtin_longjmp at
3657	       all.  The non-local goto machinery really requires the
3658	       destination be in a different function.  If we allow the
3659	       function calling __builtin_longjmp to be inlined into the
3660	       function calling __builtin_setjmp, Things will Go Awry.  */
3661	    inline_forbidden_reason
3662	      = G_("function %q+F can never be inlined because "
3663		   "it uses setjmp-longjmp exception handling");
3664	    *handled_ops_p = true;
3665	    return t;
3666
3667	  case BUILT_IN_NONLOCAL_GOTO:
3668	    /* Similarly.  */
3669	    inline_forbidden_reason
3670	      = G_("function %q+F can never be inlined because "
3671		   "it uses non-local goto");
3672	    *handled_ops_p = true;
3673	    return t;
3674
3675	  case BUILT_IN_RETURN:
3676	  case BUILT_IN_APPLY_ARGS:
3677	    /* If a __builtin_apply_args caller were inlined,
3678	       it would be saving arguments of the function it has
3679	       been inlined into.  Similarly, __builtin_return would
3680	       return from the function it has been inlined into.  */
3681	    inline_forbidden_reason
3682	      = G_("function %q+F can never be inlined because "
3683		   "it uses __builtin_return or __builtin_apply_args");
3684	    *handled_ops_p = true;
3685	    return t;
3686
3687	  default:
3688	    break;
3689	  }
3690      break;
3691
3692    case GIMPLE_GOTO:
3693      t = gimple_goto_dest (stmt);
3694
3695      /* We will not inline a function which uses computed goto.  The
3696	 addresses of its local labels, which may be tucked into
3697	 global storage, are of course not constant across
3698	 instantiations, which causes unexpected behavior.  */
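      /* For example (illustrative), a function containing

	   void *p = i ? &&l1 : &&l2;
	   goto *p;

	 is rejected here: after inlining, &&l1 and &&l2 would have to
	 refer to labels in the inlined copy, which is not guaranteed.  */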
3699      if (TREE_CODE (t) != LABEL_DECL)
3700	{
3701	  inline_forbidden_reason
3702	    = G_("function %q+F can never be inlined "
3703		 "because it contains a computed goto");
3704	  *handled_ops_p = true;
3705	  return t;
3706	}
3707      break;
3708
3709    default:
3710      break;
3711    }
3712
3713  *handled_ops_p = false;
3714  return NULL_TREE;
3715}
3716
3717/* Return true if FNDECL is a function that cannot be inlined into
3718   another one.  */
3719
3720static bool
3721inline_forbidden_p (tree fndecl)
3722{
3723  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3724  struct walk_stmt_info wi;
3725  basic_block bb;
3726  bool forbidden_p = false;
3727
3728  /* First check for shared reasons not to copy the code.  */
3729  inline_forbidden_reason = copy_forbidden (fun, fndecl);
3730  if (inline_forbidden_reason != NULL)
3731    return true;
3732
3733  /* Next, walk the statements of the function looking for
3734	     constructs we can't handle, or that are non-optimal for inlining.  */
3735  hash_set<tree> visited_nodes;
3736  memset (&wi, 0, sizeof (wi));
3737  wi.info = (void *) fndecl;
3738  wi.pset = &visited_nodes;
3739
3740  FOR_EACH_BB_FN (bb, fun)
3741    {
3742      gimple ret;
3743      gimple_seq seq = bb_seq (bb);
3744      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3745      forbidden_p = (ret != NULL);
3746      if (forbidden_p)
3747	break;
3748    }
3749
3750  return forbidden_p;
3751}
3752
3753/* Return false if the function FNDECL cannot be inlined on account of its
3754   attributes, true otherwise.  */
3755static bool
3756function_attribute_inlinable_p (const_tree fndecl)
3757{
3758  if (targetm.attribute_table)
3759    {
3760      const_tree a;
3761
3762      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3763	{
3764	  const_tree name = TREE_PURPOSE (a);
3765	  int i;
3766
3767	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3768	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3769	      return targetm.function_attribute_inlinable_p (fndecl);
3770	}
3771    }
3772
3773  return true;
3774}
3775
3776/* Returns nonzero if FN is a function that does not have any
3777   fundamental inline blocking properties.  */
3778
3779bool
3780tree_inlinable_function_p (tree fn)
3781{
3782  bool inlinable = true;
3783  bool do_warning;
3784  tree always_inline;
3785
3786  /* If we've already decided this function shouldn't be inlined,
3787     there's no need to check again.  */
3788  if (DECL_UNINLINABLE (fn))
3789    return false;
3790
3791  /* We only warn for functions declared `inline' by the user.  */
3792  do_warning = (warn_inline
3793		&& DECL_DECLARED_INLINE_P (fn)
3794		&& !DECL_NO_INLINE_WARNING_P (fn)
3795		&& !DECL_IN_SYSTEM_HEADER (fn));
3796
3797  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3798
3799  if (flag_no_inline
3800      && always_inline == NULL)
3801    {
3802      if (do_warning)
3803        warning (OPT_Winline, "function %q+F can never be inlined because it "
3804                 "is suppressed using -fno-inline", fn);
3805      inlinable = false;
3806    }
3807
3808  else if (!function_attribute_inlinable_p (fn))
3809    {
3810      if (do_warning)
3811        warning (OPT_Winline, "function %q+F can never be inlined because it "
3812                 "uses attributes conflicting with inlining", fn);
3813      inlinable = false;
3814    }
3815
3816  else if (inline_forbidden_p (fn))
3817    {
3818      /* See if we should warn about uninlinable functions.  Previously,
3819	 some of these warnings would be issued while trying to expand
3820	 the function inline, but that would cause multiple warnings
3821	 about functions that would for example call alloca.  But since
3822	 this is a property of the function, just one warning is enough.
3823	 As a bonus we can now give more details about the reason why a
3824	 function is not inlinable.  */
3825      if (always_inline)
3826	error (inline_forbidden_reason, fn);
3827      else if (do_warning)
3828	warning (OPT_Winline, inline_forbidden_reason, fn);
3829
3830      inlinable = false;
3831    }
3832
3833  /* Squirrel away the result so that we don't have to check again.  */
3834  DECL_UNINLINABLE (fn) = !inlinable;
3835
3836  return inlinable;
3837}
3838
3839	/* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3840	   word size, take a possible memcpy call into account, and return the
3841	   cost based on whether we optimize for size or speed according to SPEED_P.  */
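
/* For example (numbers are illustrative and target dependent): with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4, a 24-byte
   aggregate costs (24 + 8 - 1) / 8 == 3 moves, while a 64-byte
   aggregate exceeds 8 * 4 == 32 bytes and is charged the flat memcpy
   cost of 4 (three arguments plus the call).  */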
3842
3843int
3844estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3845{
3846  HOST_WIDE_INT size;
3847
3848  gcc_assert (!VOID_TYPE_P (type));
3849
3850  if (TREE_CODE (type) == VECTOR_TYPE)
3851    {
3852      machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3853      machine_mode simd
3854	= targetm.vectorize.preferred_simd_mode (inner);
3855      int simd_mode_size = GET_MODE_SIZE (simd);
3856      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3857	      / simd_mode_size);
3858    }
3859
3860  size = int_size_in_bytes (type);
3861
3862  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3863    /* Cost of a memcpy call, 3 arguments and the call.  */
3864    return 4;
3865  else
3866    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3867}
3868
3869	/* Returns the cost of operation CODE, according to WEIGHTS.  */
3870
3871static int
3872estimate_operator_cost (enum tree_code code, eni_weights *weights,
3873			tree op1 ATTRIBUTE_UNUSED, tree op2)
3874{
3875  switch (code)
3876    {
3877    /* These are "free" conversions, or their presumed cost
3878       is folded into other operations.  */
3879    case RANGE_EXPR:
3880    CASE_CONVERT:
3881    case COMPLEX_EXPR:
3882    case PAREN_EXPR:
3883    case VIEW_CONVERT_EXPR:
3884      return 0;
3885
3886    /* Assign cost of 1 to usual operations.
3887       ??? We may consider mapping RTL costs to this.  */
3888    case COND_EXPR:
3889    case VEC_COND_EXPR:
3890    case VEC_PERM_EXPR:
3891
3892    case PLUS_EXPR:
3893    case POINTER_PLUS_EXPR:
3894    case MINUS_EXPR:
3895    case MULT_EXPR:
3896    case MULT_HIGHPART_EXPR:
3897    case FMA_EXPR:
3898
3899    case ADDR_SPACE_CONVERT_EXPR:
3900    case FIXED_CONVERT_EXPR:
3901    case FIX_TRUNC_EXPR:
3902
3903    case NEGATE_EXPR:
3904    case FLOAT_EXPR:
3905    case MIN_EXPR:
3906    case MAX_EXPR:
3907    case ABS_EXPR:
3908
3909    case LSHIFT_EXPR:
3910    case RSHIFT_EXPR:
3911    case LROTATE_EXPR:
3912    case RROTATE_EXPR:
3913
3914    case BIT_IOR_EXPR:
3915    case BIT_XOR_EXPR:
3916    case BIT_AND_EXPR:
3917    case BIT_NOT_EXPR:
3918
3919    case TRUTH_ANDIF_EXPR:
3920    case TRUTH_ORIF_EXPR:
3921    case TRUTH_AND_EXPR:
3922    case TRUTH_OR_EXPR:
3923    case TRUTH_XOR_EXPR:
3924    case TRUTH_NOT_EXPR:
3925
3926    case LT_EXPR:
3927    case LE_EXPR:
3928    case GT_EXPR:
3929    case GE_EXPR:
3930    case EQ_EXPR:
3931    case NE_EXPR:
3932    case ORDERED_EXPR:
3933    case UNORDERED_EXPR:
3934
3935    case UNLT_EXPR:
3936    case UNLE_EXPR:
3937    case UNGT_EXPR:
3938    case UNGE_EXPR:
3939    case UNEQ_EXPR:
3940    case LTGT_EXPR:
3941
3942    case CONJ_EXPR:
3943
3944    case PREDECREMENT_EXPR:
3945    case PREINCREMENT_EXPR:
3946    case POSTDECREMENT_EXPR:
3947    case POSTINCREMENT_EXPR:
3948
3949    case REALIGN_LOAD_EXPR:
3950
3951    case REDUC_MAX_EXPR:
3952    case REDUC_MIN_EXPR:
3953    case REDUC_PLUS_EXPR:
3954    case WIDEN_SUM_EXPR:
3955    case WIDEN_MULT_EXPR:
3956    case DOT_PROD_EXPR:
3957    case SAD_EXPR:
3958    case WIDEN_MULT_PLUS_EXPR:
3959    case WIDEN_MULT_MINUS_EXPR:
3960    case WIDEN_LSHIFT_EXPR:
3961
3962    case VEC_WIDEN_MULT_HI_EXPR:
3963    case VEC_WIDEN_MULT_LO_EXPR:
3964    case VEC_WIDEN_MULT_EVEN_EXPR:
3965    case VEC_WIDEN_MULT_ODD_EXPR:
3966    case VEC_UNPACK_HI_EXPR:
3967    case VEC_UNPACK_LO_EXPR:
3968    case VEC_UNPACK_FLOAT_HI_EXPR:
3969    case VEC_UNPACK_FLOAT_LO_EXPR:
3970    case VEC_PACK_TRUNC_EXPR:
3971    case VEC_PACK_SAT_EXPR:
3972    case VEC_PACK_FIX_TRUNC_EXPR:
3973    case VEC_WIDEN_LSHIFT_HI_EXPR:
3974    case VEC_WIDEN_LSHIFT_LO_EXPR:
3975
3976      return 1;
3977
3978	    /* A few special cases of expensive operations.  This is useful
3979	       to avoid inlining functions having too many of these.  */
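    /* For example, 'a / b' with a non-constant divisor is charged
       weights->div_mod_cost (10 in eni_time_weights, see init_inline_once
       below), whereas 'a / 4' is expected to be strength-reduced later
       and is charged 1 like other arithmetic.  */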
3980    case TRUNC_DIV_EXPR:
3981    case CEIL_DIV_EXPR:
3982    case FLOOR_DIV_EXPR:
3983    case ROUND_DIV_EXPR:
3984    case EXACT_DIV_EXPR:
3985    case TRUNC_MOD_EXPR:
3986    case CEIL_MOD_EXPR:
3987    case FLOOR_MOD_EXPR:
3988    case ROUND_MOD_EXPR:
3989    case RDIV_EXPR:
3990      if (TREE_CODE (op2) != INTEGER_CST)
3991        return weights->div_mod_cost;
3992      return 1;
3993
3994    default:
3995      /* We expect a copy assignment with no operator.  */
3996      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3997      return 0;
3998    }
3999}
4000
4001
4002/* Estimate number of instructions that will be created by expanding
4003   the statements in the statement sequence STMTS.
4004   WEIGHTS contains weights attributed to various constructs.  */
4005
4006static
4007int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4008{
4009  int cost;
4010  gimple_stmt_iterator gsi;
4011
4012  cost = 0;
4013  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4014    cost += estimate_num_insns (gsi_stmt (gsi), weights);
4015
4016  return cost;
4017}
4018
4019
4020/* Estimate number of instructions that will be created by expanding STMT.
4021   WEIGHTS contains weights attributed to various constructs.  */
4022
4023int
4024estimate_num_insns (gimple stmt, eni_weights *weights)
4025{
4026  unsigned cost, i;
4027  enum gimple_code code = gimple_code (stmt);
4028  tree lhs;
4029  tree rhs;
4030
4031  switch (code)
4032    {
4033    case GIMPLE_ASSIGN:
4034	      /* Try to estimate the cost of assignments.  We have two cases to
4035	 deal with:
4036	 1) Simple assignments to registers;
4037	 2) Stores to things that must live in memory.  This includes
4038	    "normal" stores to scalars, but also assignments of large
4039	    structures, or constructors of big arrays;
4040
4041	 Let us look at these two cases, assuming we have "a = b + C":
4042	 <GIMPLE_ASSIGN <var_decl "a">
4043	        <plus_expr <var_decl "b"> <constant C>>
4044	 If "a" is a GIMPLE register, the assignment to it is free on almost
4045	 any target, because "a" usually ends up in a real register.  Hence
4046	 the only cost of this expression comes from the PLUS_EXPR, and we
4047	 can ignore the GIMPLE_ASSIGN.
4048	 If "a" is not a GIMPLE register, the assignment to "a" will most
4049	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4050	 of moving something into "a", which we compute using the function
4051	 estimate_move_cost.  */
4052      if (gimple_clobber_p (stmt))
4053	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4054
4055      lhs = gimple_assign_lhs (stmt);
4056      rhs = gimple_assign_rhs1 (stmt);
4057
4058      cost = 0;
4059
4060      /* Account for the cost of moving to / from memory.  */
4061      if (gimple_store_p (stmt))
4062	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4063      if (gimple_assign_load_p (stmt))
4064	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4065
4066      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4067      				      gimple_assign_rhs1 (stmt),
4068				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4069				      == GIMPLE_BINARY_RHS
4070				      ? gimple_assign_rhs2 (stmt) : NULL);
4071      break;
4072
4073    case GIMPLE_COND:
4074      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4075      				         gimple_op (stmt, 0),
4076				         gimple_op (stmt, 1));
4077      break;
4078
4079    case GIMPLE_SWITCH:
4080      {
4081	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4082	/* Take into account cost of the switch + guess 2 conditional jumps for
4083	   each case label.
4084
4085	   TODO: once the switch expansion logic is sufficiently separated, we can
4086	   do a better job of estimating the cost of the switch.  */
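	/* As an illustration: a switch with 16 labels is counted as
	   floor_log2 (16) * 2 == 8 insns for the time estimate, but as
	   16 * 2 == 32 insns for the size estimate.  */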
4087	if (weights->time_based)
4088	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4089	else
4090	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4091      }
4092      break;
4093
4094    case GIMPLE_CALL:
4095      {
4096	tree decl;
4097
4098	if (gimple_call_internal_p (stmt))
4099	  return 0;
4100	else if ((decl = gimple_call_fndecl (stmt))
4101		 && DECL_BUILT_IN (decl))
4102	  {
4103	    /* Do not special-case builtins where we see the body.
4104	       This just confuses the inliner.  */
4105	    struct cgraph_node *node;
4106	    if (!(node = cgraph_node::get (decl))
4107		|| node->definition)
4108	      ;
4109	    /* For builtins that are likely expanded to nothing or
4110	       inlined, do not account for operand costs.  */
4111	    else if (is_simple_builtin (decl))
4112	      return 0;
4113	    else if (is_inexpensive_builtin (decl))
4114	      return weights->target_builtin_call_cost;
4115	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4116	      {
4117		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4118		   specialize the cheap expansion we do here.
4119		   ???  This asks for a more general solution.  */
4120		switch (DECL_FUNCTION_CODE (decl))
4121		  {
4122		    case BUILT_IN_POW:
4123		    case BUILT_IN_POWF:
4124		    case BUILT_IN_POWL:
4125		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4126			  && REAL_VALUES_EQUAL
4127			  (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4128			return estimate_operator_cost
4129			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4130			     gimple_call_arg (stmt, 0));
4131		      break;
4132
4133		    default:
4134		      break;
4135		  }
4136	      }
4137	  }
4138
4139	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4140	if (gimple_call_lhs (stmt))
4141	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4142				      weights->time_based);
4143	for (i = 0; i < gimple_call_num_args (stmt); i++)
4144	  {
4145	    tree arg = gimple_call_arg (stmt, i);
4146	    cost += estimate_move_cost (TREE_TYPE (arg),
4147					weights->time_based);
4148	  }
4149	break;
4150      }
4151
4152    case GIMPLE_RETURN:
4153      return weights->return_cost;
4154
4155    case GIMPLE_GOTO:
4156    case GIMPLE_LABEL:
4157    case GIMPLE_NOP:
4158    case GIMPLE_PHI:
4159    case GIMPLE_PREDICT:
4160    case GIMPLE_DEBUG:
4161      return 0;
4162
4163    case GIMPLE_ASM:
4164      {
4165	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4166	/* 1000 means infinity. This avoids overflows later
4167	   with very long asm statements.  */
4168	if (count > 1000)
4169	  count = 1000;
4170	return count;
4171      }
4172
4173    case GIMPLE_RESX:
4174      /* This is either going to be an external function call with one
4175	 argument, or two register copy statements plus a goto.  */
4176      return 2;
4177
4178    case GIMPLE_EH_DISPATCH:
4179      /* ??? This is going to turn into a switch statement.  Ideally
4180	 we'd have a look at the eh region and estimate the number of
4181	 edges involved.  */
4182      return 10;
4183
4184    case GIMPLE_BIND:
4185      return estimate_num_insns_seq (
4186	       gimple_bind_body (as_a <gbind *> (stmt)),
4187	       weights);
4188
4189    case GIMPLE_EH_FILTER:
4190      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4191
4192    case GIMPLE_CATCH:
4193      return estimate_num_insns_seq (gimple_catch_handler (
4194				       as_a <gcatch *> (stmt)),
4195				     weights);
4196
4197    case GIMPLE_TRY:
4198      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4199              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4200
4201    /* OMP directives are generally very expensive.  */
4202
4203    case GIMPLE_OMP_RETURN:
4204    case GIMPLE_OMP_SECTIONS_SWITCH:
4205    case GIMPLE_OMP_ATOMIC_STORE:
4206    case GIMPLE_OMP_CONTINUE:
4207      /* ...except these, which are cheap.  */
4208      return 0;
4209
4210    case GIMPLE_OMP_ATOMIC_LOAD:
4211      return weights->omp_cost;
4212
4213    case GIMPLE_OMP_FOR:
4214      return (weights->omp_cost
4215              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4216              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4217
4218    case GIMPLE_OMP_PARALLEL:
4219    case GIMPLE_OMP_TASK:
4220    case GIMPLE_OMP_CRITICAL:
4221    case GIMPLE_OMP_MASTER:
4222    case GIMPLE_OMP_TASKGROUP:
4223    case GIMPLE_OMP_ORDERED:
4224    case GIMPLE_OMP_SECTION:
4225    case GIMPLE_OMP_SECTIONS:
4226    case GIMPLE_OMP_SINGLE:
4227    case GIMPLE_OMP_TARGET:
4228    case GIMPLE_OMP_TEAMS:
4229      return (weights->omp_cost
4230              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4231
4232    case GIMPLE_TRANSACTION:
4233      return (weights->tm_cost
4234	      + estimate_num_insns_seq (gimple_transaction_body (
4235					  as_a <gtransaction *> (stmt)),
4236					weights));
4237
4238    default:
4239      gcc_unreachable ();
4240    }
4241
4242  return cost;
4243}
4244
4245/* Estimate number of instructions that will be created by expanding
4246   function FNDECL.  WEIGHTS contains weights attributed to various
4247   constructs.  */
4248
4249int
4250estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4251{
4252  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4253  gimple_stmt_iterator bsi;
4254  basic_block bb;
4255  int n = 0;
4256
4257  gcc_assert (my_function && my_function->cfg);
4258  FOR_EACH_BB_FN (bb, my_function)
4259    {
4260      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4261	n += estimate_num_insns (gsi_stmt (bsi), weights);
4262    }
4263
4264  return n;
4265}
4266
4267
4268/* Initializes weights used by estimate_num_insns.  */
4269
4270void
4271init_inline_once (void)
4272{
4273  eni_size_weights.call_cost = 1;
4274  eni_size_weights.indirect_call_cost = 3;
4275  eni_size_weights.target_builtin_call_cost = 1;
4276  eni_size_weights.div_mod_cost = 1;
4277  eni_size_weights.omp_cost = 40;
4278  eni_size_weights.tm_cost = 10;
4279  eni_size_weights.time_based = false;
4280  eni_size_weights.return_cost = 1;
4281
4282	  /* Estimating the time for a call is difficult, since we have no idea what the
4283     called function does.  In the current uses of eni_time_weights,
4284     underestimating the cost does less harm than overestimating it, so
4285     we choose a rather small value here.  */
4286  eni_time_weights.call_cost = 10;
4287  eni_time_weights.indirect_call_cost = 15;
4288  eni_time_weights.target_builtin_call_cost = 1;
4289  eni_time_weights.div_mod_cost = 10;
4290  eni_time_weights.omp_cost = 40;
4291  eni_time_weights.tm_cost = 40;
4292  eni_time_weights.time_based = true;
4293  eni_time_weights.return_cost = 2;
4294}
4295
4296/* Estimate the number of instructions in a gimple_seq. */
4297
4298int
4299count_insns_seq (gimple_seq seq, eni_weights *weights)
4300{
4301  gimple_stmt_iterator gsi;
4302  int n = 0;
4303  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4304    n += estimate_num_insns (gsi_stmt (gsi), weights);
4305
4306  return n;
4307}
4308
4309
4310/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4311
4312static void
4313prepend_lexical_block (tree current_block, tree new_block)
4314{
4315  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4316  BLOCK_SUBBLOCKS (current_block) = new_block;
4317  BLOCK_SUPERCONTEXT (new_block) = current_block;
4318}
4319
4320/* Add local variables from CALLEE to CALLER.  */
4321
4322static inline void
4323add_local_variables (struct function *callee, struct function *caller,
4324		     copy_body_data *id)
4325{
4326  tree var;
4327  unsigned ix;
4328
4329  FOR_EACH_LOCAL_DECL (callee, ix, var)
4330    if (!can_be_nonlocal (var, id))
4331      {
4332        tree new_var = remap_decl (var, id);
4333
4334        /* Remap debug-expressions.  */
4335	if (TREE_CODE (new_var) == VAR_DECL
4336	    && DECL_HAS_DEBUG_EXPR_P (var)
4337	    && new_var != var)
4338	  {
4339	    tree tem = DECL_DEBUG_EXPR (var);
4340	    bool old_regimplify = id->regimplify;
4341	    id->remapping_type_depth++;
4342	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4343	    id->remapping_type_depth--;
4344	    id->regimplify = old_regimplify;
4345	    SET_DECL_DEBUG_EXPR (new_var, tem);
4346	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4347	  }
4348	add_local_decl (caller, new_var);
4349      }
4350}
4351
4352/* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4353   have brought in or introduced any debug stmts for SRCVAR.  */
4354
4355static inline void
4356reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4357{
4358  tree *remappedvarp = id->decl_map->get (srcvar);
4359
4360  if (!remappedvarp)
4361    return;
4362
4363  if (TREE_CODE (*remappedvarp) != VAR_DECL)
4364    return;
4365
4366  if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4367    return;
4368
4369  tree tvar = target_for_debug_bind (*remappedvarp);
4370  if (!tvar)
4371    return;
4372
4373  gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4374					  id->call_stmt);
4375  gimple_seq_add_stmt (bindings, stmt);
4376}
4377
4378/* For each inlined variable for which we may have debug bind stmts,
4379   add before GSI a final debug stmt resetting it, marking the end of
4380   its life, so that var-tracking knows it doesn't have to compute
4381   further locations for it.  */
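
/* Each reset is a debug bind statement with no value; in GIMPLE dumps
   it shows up as something like '# DEBUG x => NULL' (illustrative),
   placed right after the inlined body at the point of the original
   call.  */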
4382
4383static inline void
4384reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4385{
4386  tree var;
4387  unsigned ix;
4388  gimple_seq bindings = NULL;
4389
4390  if (!gimple_in_ssa_p (id->src_cfun))
4391    return;
4392
4393  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4394    return;
4395
4396  for (var = DECL_ARGUMENTS (id->src_fn);
4397       var; var = DECL_CHAIN (var))
4398    reset_debug_binding (id, var, &bindings);
4399
4400  FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4401    reset_debug_binding (id, var, &bindings);
4402
4403  gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4404}
4405
4406/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4407
4408static bool
4409expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4410{
4411  tree use_retvar;
4412  tree fn;
4413  hash_map<tree, tree> *dst;
4414  hash_map<tree, tree> *st = NULL;
4415  tree return_slot;
4416  tree modify_dest;
4417  tree return_bounds = NULL;
4418  location_t saved_location;
4419  struct cgraph_edge *cg_edge;
4420  cgraph_inline_failed_t reason;
4421  basic_block return_block;
4422  edge e;
4423  gimple_stmt_iterator gsi, stmt_gsi;
4424  bool successfully_inlined = FALSE;
4425  bool purge_dead_abnormal_edges;
4426  gcall *call_stmt;
4427  unsigned int i;
4428
4429  /* Set input_location here so we get the right instantiation context
4430     if we call instantiate_decl from inlinable_function_p.  */
4431  /* FIXME: instantiate_decl isn't called by inlinable_function_p.  */
4432  saved_location = input_location;
4433  input_location = gimple_location (stmt);
4434
4435  /* From here on, we're only interested in CALL_EXPRs.  */
4436  call_stmt = dyn_cast <gcall *> (stmt);
4437  if (!call_stmt)
4438    goto egress;
4439
4440  cg_edge = id->dst_node->get_edge (stmt);
4441  gcc_checking_assert (cg_edge);
4442  /* First, see if we can figure out what function is being called.
4443     If we cannot, then there is no hope of inlining the function.  */
4444  if (cg_edge->indirect_unknown_callee)
4445    goto egress;
4446  fn = cg_edge->callee->decl;
4447  gcc_checking_assert (fn);
4448
4449  /* If FN is a declaration of a function in a nested scope that was
4450     globally declared inline, we don't set its DECL_INITIAL.
4451     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4452     C++ front-end uses it for cdtors to refer to their internal
4453     declarations, that are not real functions.  Fortunately those
4454     don't have trees to be saved, so we can tell by checking their
4455     gimple_body.  */
4456  if (!DECL_INITIAL (fn)
4457      && DECL_ABSTRACT_ORIGIN (fn)
4458      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4459    fn = DECL_ABSTRACT_ORIGIN (fn);
4460
4461  /* Don't try to inline functions that are not well-suited to inlining.  */
4462  if (cg_edge->inline_failed)
4463    {
4464      reason = cg_edge->inline_failed;
4465      /* If this call was originally indirect, we do not want to emit any
4466	 inlining related warnings or sorry messages because there are no
4467	 guarantees regarding those.  */
4468      if (cg_edge->indirect_inlining_edge)
4469	goto egress;
4470
4471      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4472	          /* For extern inline functions that get redefined we have always
4473	     silently ignored the always_inline flag.  Better behaviour would
4474	     be to be able to keep both bodies and use the extern inline body
4475	     for inlining, but we can't do that because front ends overwrite
4476	     the body.  */
4477	  && !cg_edge->callee->local.redefined_extern_inline
4478	  /* During early inline pass, report only when optimization is
4479	     not turned on.  */
4480	  && (symtab->global_info_ready
4481	      || !optimize
4482	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4483	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4484	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4485	{
4486	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4487		 cgraph_inline_failed_string (reason));
4488	  error ("called from here");
4489	}
4490      else if (warn_inline
4491	       && DECL_DECLARED_INLINE_P (fn)
4492	       && !DECL_NO_INLINE_WARNING_P (fn)
4493	       && !DECL_IN_SYSTEM_HEADER (fn)
4494	       && reason != CIF_UNSPECIFIED
4495	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4496	       /* Do not warn about not inlined recursive calls.  */
4497	       && !cg_edge->recursive_p ()
4498	       /* Avoid warnings during early inline pass. */
4499	       && symtab->global_info_ready)
4500	{
4501	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4502		   fn, _(cgraph_inline_failed_string (reason)));
4503	  warning (OPT_Winline, "called from here");
4504	}
4505      goto egress;
4506    }
4507  fn = cg_edge->callee->decl;
4508  cg_edge->callee->get_untransformed_body ();
4509
4510#ifdef ENABLE_CHECKING
4511  if (cg_edge->callee->decl != id->dst_node->decl)
4512    cg_edge->callee->verify ();
4513#endif
4514
4515  /* We will be inlining this callee.  */
4516  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4517  id->assign_stmts.create (0);
4518
4519	  /* Update the caller's EH personality.  */
4520  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4521    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4522      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4523
4524  /* Split the block holding the GIMPLE_CALL.  */
4525  e = split_block (bb, stmt);
4526  bb = e->src;
4527  return_block = e->dest;
4528  remove_edge (e);
4529
4530  /* split_block splits after the statement; work around this by
4531     moving the call into the second block manually.  Not pretty,
4532     but seems easier than doing the CFG manipulation by hand
4533     when the GIMPLE_CALL is in the last statement of BB.  */
4534  stmt_gsi = gsi_last_bb (bb);
4535  gsi_remove (&stmt_gsi, false);
4536
4537  /* If the GIMPLE_CALL was in the last statement of BB, it may have
4538     been the source of abnormal edges.  In this case, schedule
4539     the removal of dead abnormal edges.  */
4540  gsi = gsi_start_bb (return_block);
4541  if (gsi_end_p (gsi))
4542    {
4543      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4544      purge_dead_abnormal_edges = true;
4545    }
4546  else
4547    {
4548      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4549      purge_dead_abnormal_edges = false;
4550    }
4551
4552  stmt_gsi = gsi_start_bb (return_block);
4553
4554  /* Build a block containing code to initialize the arguments, the
4555     actual inline expansion of the body, and a label for the return
4556     statements within the function to jump to.  The type of the
4557     statement expression is the return type of the function call.
4558     ???  If the call does not have an associated block then we will
4559     remap all callee blocks to NULL, effectively dropping most of
4560     its debug information.  This should only happen for calls to
4561     artificial decls inserted by the compiler itself.  We need to
4562     either link the inlined blocks into the caller block tree or
4563     not refer to them in any way to not break GC for locations.  */
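
  /* Conceptually (illustrative only), inlining 'a = foo (b);' where
     foo is 'int foo (int x) { return x + 1; }' leaves something like

	 x.1 = b;          initialize_inlined_parameters
	 _2 = x.1 + 1;     copied body
	 retval.3 = _2;    return statement turned into an assignment
	 a = retval.3;     use of the return variable

     in new basic blocks spliced between BB and RETURN_BLOCK; the names
     x.1, _2 and retval.3 are made up for the example.  */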
4564  if (gimple_block (stmt))
4565    {
4566      id->block = make_node (BLOCK);
4567      BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4568      BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4569      prepend_lexical_block (gimple_block (stmt), id->block);
4570    }
4571
4572  /* Local declarations will be replaced by their equivalents in this
4573     map.  */
4574  st = id->decl_map;
4575  id->decl_map = new hash_map<tree, tree>;
4576  dst = id->debug_map;
4577  id->debug_map = NULL;
4578
4579  /* Record the function we are about to inline.  */
4580  id->src_fn = fn;
4581  id->src_node = cg_edge->callee;
4582  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4583  id->call_stmt = stmt;
4584
4585  gcc_assert (!id->src_cfun->after_inlining);
4586
4587  id->entry_bb = bb;
4588  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4589    {
4590      gimple_stmt_iterator si = gsi_last_bb (bb);
4591      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4592      						   NOT_TAKEN),
4593			GSI_NEW_STMT);
4594    }
4595  initialize_inlined_parameters (id, stmt, fn, bb);
4596
4597  if (DECL_INITIAL (fn))
4598    {
4599      if (gimple_block (stmt))
4600	{
4601	  tree *var;
4602
4603	  prepend_lexical_block (id->block,
4604				 remap_blocks (DECL_INITIAL (fn), id));
4605	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4606			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4607				   == NULL_TREE));
4608	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4609	     otherwise, in DWARF, the DW_TAG_formal_parameter DIEs will not be
4610	     children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4611	     under it.  The parameters can then be evaluated in the debugger,
4612	     but don't show up in backtraces.  */
4613	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4614	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4615	      {
4616		tree v = *var;
4617		*var = TREE_CHAIN (v);
4618		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4619		BLOCK_VARS (id->block) = v;
4620	      }
4621	    else
4622	      var = &TREE_CHAIN (*var);
4623	}
4624      else
4625	remap_blocks_to_null (DECL_INITIAL (fn), id);
4626    }
4627
4628  /* Return statements in the function body will be replaced by jumps
4629     to the RET_LABEL.  */
4630  gcc_assert (DECL_INITIAL (fn));
4631  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4632
4633  /* Find the LHS to which the result of this call is assigned.  */
4634  return_slot = NULL;
4635  if (gimple_call_lhs (stmt))
4636    {
4637      modify_dest = gimple_call_lhs (stmt);
4638
4639      /* Remember where to copy returned bounds.  */
4640      if (gimple_call_with_bounds_p (stmt)
4641	  && TREE_CODE (modify_dest) == SSA_NAME)
4642	{
4643	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4644	  if (retbnd)
4645	    {
4646	      return_bounds = gimple_call_lhs (retbnd);
4647	      /* If returned bounds are not used then just
4648		 remove unused call.  */
4649	      if (!return_bounds)
4650		{
4651		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4652		  gsi_remove (&iter, true);
4653		}
4654	    }
4655	}
4656
4657      /* The function which we are inlining might not return a value,
4658	 in which case we should issue a warning that the function
4659	 does not return a value.  In that case the optimizers will
4660	 see that the variable to which the value is assigned was not
4661	 initialized.  We do not want to issue a warning about that
4662	 uninitialized variable.  */
4663      if (DECL_P (modify_dest))
4664	TREE_NO_WARNING (modify_dest) = 1;
4665
4666      if (gimple_call_return_slot_opt_p (call_stmt))
4667	{
4668	  return_slot = modify_dest;
4669	  modify_dest = NULL;
4670	}
4671    }
4672  else
4673    modify_dest = NULL;
4674
4675  /* If we are inlining a call to the C++ operator new, we don't want
4676     to use type based alias analysis on the return value.  Otherwise
4677     we may get confused if the compiler sees that the inlined new
4678     function returns a pointer which was just deleted.  See bug
4679     33407.  */
4680  if (DECL_IS_OPERATOR_NEW (fn))
4681    {
4682      return_slot = NULL;
4683      modify_dest = NULL;
4684    }
4685
4686  /* Declare the return variable for the function.  */
4687  use_retvar = declare_return_variable (id, return_slot, modify_dest,
4688					return_bounds, bb);
4689
4690  /* Add local vars in this inlined callee to caller.  */
4691  add_local_variables (id->src_cfun, cfun, id);
4692
4693  if (dump_file && (dump_flags & TDF_DETAILS))
4694    {
4695      fprintf (dump_file, "Inlining ");
4696      print_generic_expr (dump_file, id->src_fn, 0);
4697      fprintf (dump_file, " to ");
4698      print_generic_expr (dump_file, id->dst_fn, 0);
4699      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4700    }
4701
4702  /* This is it.  Duplicate the callee body.  Assume callee is
4703     pre-gimplified.  Note that we must not alter the caller
4704     function in any way before this point, as this CALL_EXPR may be
4705     a self-referential call; if we're calling ourselves, we need to
4706     duplicate our body before altering anything.  */
4707  copy_body (id, cg_edge->callee->count,
4708  	     GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4709	     bb, return_block, NULL);
4710
4711  reset_debug_bindings (id, stmt_gsi);
4712
4713  /* Reset the escaped solution.  */
4714  if (cfun->gimple_df)
4715    pt_solution_reset (&cfun->gimple_df->escaped);
4716
4717  /* Clean up.  */
4718  if (id->debug_map)
4719    {
4720      delete id->debug_map;
4721      id->debug_map = dst;
4722    }
4723  delete id->decl_map;
4724  id->decl_map = st;
4725
4726	  /* Unlink the call's virtual operands before replacing it.  */
4727  unlink_stmt_vdef (stmt);
4728  if (gimple_vdef (stmt)
4729      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4730    release_ssa_name (gimple_vdef (stmt));
4731
4732  /* If the inlined function returns a result that we care about,
4733     substitute the GIMPLE_CALL with an assignment of the return
4734     variable to the LHS of the call.  That is, if STMT was
4735     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4736  if (use_retvar && gimple_call_lhs (stmt))
4737    {
4738      gimple old_stmt = stmt;
4739      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4740      gsi_replace (&stmt_gsi, stmt, false);
4741      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4742
4743      /* Copy bounds if we copy structure with bounds.  */
4744      if (chkp_function_instrumented_p (id->dst_fn)
4745	  && !BOUNDED_P (use_retvar)
4746	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4747	id->assign_stmts.safe_push (stmt);
4748    }
4749  else
4750    {
4751      /* Handle the case of inlining a function with no return
4752	 statement, which causes the return value to become undefined.  */
4753      if (gimple_call_lhs (stmt)
4754	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4755	{
4756	  tree name = gimple_call_lhs (stmt);
4757	  tree var = SSA_NAME_VAR (name);
4758	  tree def = ssa_default_def (cfun, var);
4759
4760	  if (def)
4761	    {
4762	      /* If the variable is used undefined, make this name
4763		 undefined via a move.  */
4764	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4765	      gsi_replace (&stmt_gsi, stmt, true);
4766	    }
4767	  else
4768	    {
4769	      /* Otherwise make this variable undefined.  */
4770	      gsi_remove (&stmt_gsi, true);
4771	      set_ssa_default_def (cfun, var, name);
4772	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4773	    }
4774	}
4775      else
4776        gsi_remove (&stmt_gsi, true);
4777    }
4778
4779  /* Put returned bounds into the correct place if required.  */
4780  if (return_bounds)
4781    {
4782      gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4783      gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4784      gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4785      unlink_stmt_vdef (old_stmt);
4786      gsi_replace (&bnd_gsi, new_stmt, false);
4787      maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4788      cgraph_update_edges_for_call_stmt (old_stmt,
4789					 gimple_call_fndecl (old_stmt),
4790					 new_stmt);
4791    }
4792
4793  if (purge_dead_abnormal_edges)
4794    {
4795      gimple_purge_dead_eh_edges (return_block);
4796      gimple_purge_dead_abnormal_call_edges (return_block);
4797    }
4798
4799  /* If the value of the new expression is ignored, that's OK.  We
4800     don't warn about this for CALL_EXPRs, so we shouldn't warn about
4801     the equivalent inlined version either.  */
4802  if (is_gimple_assign (stmt))
4803    {
4804      gcc_assert (gimple_assign_single_p (stmt)
4805		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4806      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4807    }
4808
4809  /* Copy bounds for all generated assigns that need it.  */
4810  for (i = 0; i < id->assign_stmts.length (); i++)
4811    chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4812  id->assign_stmts.release ();
4813
4814  /* Output the inlining info for this abstract function, since it has been
4815     inlined.  If we don't do this now, we can lose the information about the
4816     variables in the function when the blocks get blown away as soon as we
4817     remove the cgraph node.  */
4818  if (gimple_block (stmt))
4819    (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4820
4821  /* Update callgraph if needed.  */
4822  cg_edge->callee->remove ();
4823
4824  id->block = NULL_TREE;
4825  successfully_inlined = TRUE;
4826
4827 egress:
4828  input_location = saved_location;
4829  return successfully_inlined;
4830}
4831
4832	/* Expand call statements reachable from BB.
4833   We can only have CALL_EXPRs as the "toplevel" tree code or nested
4834   in a MODIFY_EXPR.  */
4835
4836static bool
4837gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4838{
4839  gimple_stmt_iterator gsi;
4840  bool inlined = false;
4841
4842  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4843    {
4844      gimple stmt = gsi_stmt (gsi);
4845      gsi_prev (&gsi);
4846
4847      if (is_gimple_call (stmt)
4848	  && !gimple_call_internal_p (stmt))
4849	inlined |= expand_call_inline (bb, stmt, id);
4850    }
4851
4852  return inlined;
4853}
4854
4855
4856/* Walk all basic blocks created after FIRST and try to fold every statement
4857   in the STATEMENTS pointer set.  */
4858
4859static void
4860fold_marked_statements (int first, hash_set<gimple> *statements)
4861{
4862  for (; first < n_basic_blocks_for_fn (cfun); first++)
4863    if (BASIC_BLOCK_FOR_FN (cfun, first))
4864      {
4865        gimple_stmt_iterator gsi;
4866
4867	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4868	     !gsi_end_p (gsi);
4869	     gsi_next (&gsi))
4870	  if (statements->contains (gsi_stmt (gsi)))
4871	    {
4872	      gimple old_stmt = gsi_stmt (gsi);
4873	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4874
4875	      if (old_decl && DECL_BUILT_IN (old_decl))
4876		{
4877		  /* Folding builtins can create multiple instructions;
4878		     we need to look at all of them.  */
4879		  gimple_stmt_iterator i2 = gsi;
4880		  gsi_prev (&i2);
4881		  if (fold_stmt (&gsi))
4882		    {
4883		      gimple new_stmt;
4884		      /* If a builtin at the end of a bb folded into nothing,
4885			 the following loop won't work.  */
4886		      if (gsi_end_p (gsi))
4887			{
4888			  cgraph_update_edges_for_call_stmt (old_stmt,
4889							     old_decl, NULL);
4890			  break;
4891			}
4892		      if (gsi_end_p (i2))
4893			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4894		      else
4895			gsi_next (&i2);
4896		      while (1)
4897			{
4898			  new_stmt = gsi_stmt (i2);
4899			  update_stmt (new_stmt);
4900			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4901							     new_stmt);
4902
4903			  if (new_stmt == gsi_stmt (gsi))
4904			    {
4905			      /* It is okay to check only for the very last
4906				 of these statements.  If it is a throwing
4907				 statement nothing will change.  If it isn't
4908				 this can remove EH edges.  The only case
4909				 where this wouldn't be correct is if some
4910				 intermediate stmts throw but the last one
4911				 doesn't.  That would mean we'd have to
4912				 split the block, which we can't do here
4913				 and we'd lose anyway.  And as builtins
4914				 probably never throw, this is all moot anyway.  */
4915			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
4916								  new_stmt))
4917				gimple_purge_dead_eh_edges (
4918				  BASIC_BLOCK_FOR_FN (cfun, first));
4919			      break;
4920			    }
4921			  gsi_next (&i2);
4922			}
4923		    }
4924		}
4925	      else if (fold_stmt (&gsi))
4926		{
4927		  /* Re-read the statement from GSI as fold_stmt() may
4928		     have changed it.  */
4929		  gimple new_stmt = gsi_stmt (gsi);
4930		  update_stmt (new_stmt);
4931
4932		  if (is_gimple_call (old_stmt)
4933		      || is_gimple_call (new_stmt))
4934		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4935						       new_stmt);
4936
4937		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4938		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4939								    first));
4940		}
4941	    }
4942      }
4943}
4944
4945/* Expand calls to inline functions in the body of FN.  */
4946
4947unsigned int
4948optimize_inline_calls (tree fn)
4949{
4950  copy_body_data id;
4951  basic_block bb;
4952  int last = n_basic_blocks_for_fn (cfun);
4953  bool inlined_p = false;
4954
4955  /* Clear out ID.  */
4956  memset (&id, 0, sizeof (id));
4957
4958  id.src_node = id.dst_node = cgraph_node::get (fn);
4959  gcc_assert (id.dst_node->definition);
4960  id.dst_fn = fn;
4961  /* Or, if set, the function currently being compiled.  */
4962  if (current_function_decl)
4963    id.dst_fn = current_function_decl;
4964
4965  id.copy_decl = copy_decl_maybe_to_var;
4966  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4967  id.transform_new_cfg = false;
4968  id.transform_return_to_modify = true;
4969  id.transform_parameter = true;
4970  id.transform_lang_insert_block = NULL;
4971  id.statements_to_fold = new hash_set<gimple>;
4972
4973  push_gimplify_context ();
4974
4975  /* We make no attempts to keep dominance info up-to-date.  */
4976  free_dominance_info (CDI_DOMINATORS);
4977  free_dominance_info (CDI_POST_DOMINATORS);
4978
4979  /* Register specific gimple functions.  */
4980  gimple_register_cfg_hooks ();
4981
4982  /* Reach the trees by walking over the CFG, and note the
4983     enclosing basic-blocks in the call edges.  */
4984  /* We walk the blocks going forward, because inlined function bodies
4985     will split id->current_basic_block, and the new blocks will
4986     follow it; we'll trudge through them, processing their CALL_EXPRs
4987     along the way.  */
4988  FOR_EACH_BB_FN (bb, cfun)
4989    inlined_p |= gimple_expand_calls_inline (bb, &id);
4990
4991  pop_gimplify_context (NULL);
4992
4993#ifdef ENABLE_CHECKING
4994    {
4995      struct cgraph_edge *e;
4996
4997      id.dst_node->verify ();
4998
4999      /* Double check that we inlined everything we are supposed to inline.  */
5000      for (e = id.dst_node->callees; e; e = e->next_callee)
5001	gcc_assert (e->inline_failed);
5002    }
5003#endif
5004
5005  /* Fold queued statements.  */
5006  fold_marked_statements (last, id.statements_to_fold);
5007  delete id.statements_to_fold;
5008
5009  gcc_assert (!id.debug_stmts.exists ());
5010
5011  /* If we didn't inline into the function there is nothing to do.  */
5012  if (!inlined_p)
5013    return 0;
5014
5015  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5016  number_blocks (fn);
5017
5018  delete_unreachable_blocks_update_callgraph (&id);
5019#ifdef ENABLE_CHECKING
5020  id.dst_node->verify ();
5021#endif
5022
5023  /* It would be nice to check SSA/CFG/statement consistency here, but it is
5024     not possible yet - the IPA passes might make various functions not throw,
5025     and they don't bother to proactively update local EH info.  This is done
5026     later in the fixup_cfg pass, which also executes the verification.  */
5027  return (TODO_update_ssa
5028	  | TODO_cleanup_cfg
5029	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5030	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5031	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5032	     ? TODO_rebuild_frequencies : 0));
5033}
5034
5035/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
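/* A typical use (a sketch): deep-copy a GENERIC expression by walking it
   with this callback, e.g.

	tree copy = expr;
	walk_tree (&copy, copy_tree_r, NULL, NULL);

   afterwards COPY shares no expression nodes with EXPR, while decls,
   types and constants remain shared (see the early exits below).  */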
5036
5037tree
5038copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5039{
5040  enum tree_code code = TREE_CODE (*tp);
5041  enum tree_code_class cl = TREE_CODE_CLASS (code);
5042
5043  /* We make copies of most nodes.  */
5044  if (IS_EXPR_CODE_CLASS (cl)
5045      || code == TREE_LIST
5046      || code == TREE_VEC
5047      || code == TYPE_DECL
5048      || code == OMP_CLAUSE)
5049    {
5050      /* Because the chain gets clobbered when we make a copy, we save it
5051	 here.  */
5052      tree chain = NULL_TREE, new_tree;
5053
5054      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5055	chain = TREE_CHAIN (*tp);
5056
5057      /* Copy the node.  */
5058      new_tree = copy_node (*tp);
5059
5060      *tp = new_tree;
5061
5062      /* Now, restore the chain, if appropriate.  That will cause
5063	 walk_tree to walk into the chain as well.  */
5064      if (code == PARM_DECL
5065	  || code == TREE_LIST
5066	  || code == OMP_CLAUSE)
5067	TREE_CHAIN (*tp) = chain;
5068
5069      /* For now, we don't update BLOCKs when we make copies.  So, we
5070	 have to nullify all BIND_EXPRs.  */
5071      if (TREE_CODE (*tp) == BIND_EXPR)
5072	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5073    }
5074  else if (code == CONSTRUCTOR)
5075    {
5076      /* CONSTRUCTOR nodes need special handling because
5077         we need to duplicate the vector of elements.  */
5078      tree new_tree;
5079
5080      new_tree = copy_node (*tp);
5081      CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5082      *tp = new_tree;
5083    }
5084  else if (code == STATEMENT_LIST)
5085    /* We used to just abort on STATEMENT_LIST, but we can run into them
5086       with statement-expressions (c++/40975).  */
5087    copy_statement_list (tp);
5088  else if (TREE_CODE_CLASS (code) == tcc_type)
5089    *walk_subtrees = 0;
5090  else if (TREE_CODE_CLASS (code) == tcc_declaration)
5091    *walk_subtrees = 0;
5092  else if (TREE_CODE_CLASS (code) == tcc_constant)
5093    *walk_subtrees = 0;
5094  return NULL_TREE;
5095}
5096
5097/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5098   information indicating to what new SAVE_EXPR this one should be mapped,
5099   use that one.  Otherwise, create a new node and enter it in ST.  FN is
5100   the function into which the copy will be placed.  */
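/* For instance (an illustrative sketch): if the same SAVE_EXPR <n_1 * 4>
   occurs twice in the tree being copied, the first visit creates the one
   copy and records it in ST; the second visit finds that entry, so both
   uses end up referring to the same copied node and the evaluate-once
   semantics of SAVE_EXPR is preserved.  */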
5101
5102static void
5103remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5104{
5105  tree *n;
5106  tree t;
5107
5108  /* See if we already encountered this SAVE_EXPR.  */
5109  n = st->get (*tp);
5110
5111  /* If we didn't already remap this SAVE_EXPR, do so now.  */
5112  if (!n)
5113    {
5114      t = copy_node (*tp);
5115
5116      /* Remember this SAVE_EXPR.  */
5117      st->put (*tp, t);
5118      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5119      st->put (t, t);
5120    }
5121  else
5122    {
5123      /* We've already walked into this SAVE_EXPR; don't do it again.  */
5124      *walk_subtrees = 0;
5125      t = *n;
5126    }
5127
5128  /* Replace this SAVE_EXPR with the copy.  */
5129  *tp = t;
5130}
5131
5132/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5133   label, copies the declaration and enters it in the decl map of the
5134   'copy_body_data' pointed to by WI->info.  */
5135
5136static tree
5137mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5138		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5139		        struct walk_stmt_info *wi)
5140{
5141  copy_body_data *id = (copy_body_data *) wi->info;
5142  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5143
5144  if (stmt)
5145    {
5146      tree decl = gimple_label_label (stmt);
5147
5148      /* Copy the decl and remember the copy.  */
5149      insert_decl_map (id, decl, id->copy_decl (decl, id));
5150    }
5151
5152  return NULL_TREE;
5153}
5154
5155
5156/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5157   Using the decl map of the 'copy_body_data' pointed to by WI->info,
5158   remaps all local declarations to appropriate replacements in gimple
5159   operands.  */
5160
5161static tree
5162replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5163{
5164  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5165  copy_body_data *id = (copy_body_data *) wi->info;
5166  hash_map<tree, tree> *st = id->decl_map;
5167  tree *n;
5168  tree expr = *tp;
5169
5170  /* Only a local declaration (variable or label).  */
5171  if ((TREE_CODE (expr) == VAR_DECL
5172       && !TREE_STATIC (expr))
5173      || TREE_CODE (expr) == LABEL_DECL)
5174    {
5175      /* Lookup the declaration.  */
5176      n = st->get (expr);
5177
5178      /* If it's there, remap it.  */
5179      if (n)
5180	*tp = *n;
5181      *walk_subtrees = 0;
5182    }
5183  else if (TREE_CODE (expr) == STATEMENT_LIST
5184	   || TREE_CODE (expr) == BIND_EXPR
5185	   || TREE_CODE (expr) == SAVE_EXPR)
5186    gcc_unreachable ();
5187  else if (TREE_CODE (expr) == TARGET_EXPR)
5188    {
5189      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5190         It's OK for this to happen if it was part of a subtree that
5191         isn't immediately expanded, such as operand 2 of another
5192         TARGET_EXPR.  */
5193      if (!TREE_OPERAND (expr, 1))
5194	{
5195	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5196	  TREE_OPERAND (expr, 3) = NULL_TREE;
5197	}
5198    }
5199
5200  /* Keep iterating.  */
5201  return NULL_TREE;
5202}
5203
5204
5205/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5206   Using the decl map of the 'copy_body_data' pointed to by WI->info,
5207   remaps all local declarations to appropriate replacements in gimple
5208   statements.  */
5209
5210static tree
5211replace_locals_stmt (gimple_stmt_iterator *gsip,
5212		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5213		     struct walk_stmt_info *wi)
5214{
5215  copy_body_data *id = (copy_body_data *) wi->info;
5216  gimple gs = gsi_stmt (*gsip);
5217
5218  if (gbind *stmt = dyn_cast <gbind *> (gs))
5219    {
5220      tree block = gimple_bind_block (stmt);
5221
5222      if (block)
5223	{
5224	  remap_block (&block, id);
5225	  gimple_bind_set_block (stmt, block);
5226	}
5227
5228      /* This will remap a lot of the same decls again, but this should be
5229	 harmless.  */
5230      if (gimple_bind_vars (stmt))
5231	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5232						 NULL, id));
5233    }
5234
5235  /* Keep iterating.  */
5236  return NULL_TREE;
5237}
5238
5239
5240/* Copies everything in SEQ and replaces variables and labels local to
5241   current_function_decl.  */
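/* Example use (a sketch; BODY_SEQ and OTHER_SEQ are hypothetical
   sequences in the caller):

	gimple_seq copy = copy_gimple_seq_and_replace_locals (body_seq);
	gimple_seq_add_seq (&other_seq, copy);

   The copy can then be emitted alongside the original without the two
   sharing local variable or label declarations.  */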
5242
5243gimple_seq
5244copy_gimple_seq_and_replace_locals (gimple_seq seq)
5245{
5246  copy_body_data id;
5247  struct walk_stmt_info wi;
5248  gimple_seq copy;
5249
5250  /* There's nothing to do for an empty sequence.  */
5251  if (seq == NULL)
5252    return seq;
5253
5254  /* Set up ID.  */
5255  memset (&id, 0, sizeof (id));
5256  id.src_fn = current_function_decl;
5257  id.dst_fn = current_function_decl;
5258  id.decl_map = new hash_map<tree, tree>;
5259  id.debug_map = NULL;
5260
5261  id.copy_decl = copy_decl_no_change;
5262  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5263  id.transform_new_cfg = false;
5264  id.transform_return_to_modify = false;
5265  id.transform_parameter = false;
5266  id.transform_lang_insert_block = NULL;
5267
5268  /* Walk the tree once to find local labels.  */
5269  memset (&wi, 0, sizeof (wi));
5270  hash_set<tree> visited;
5271  wi.info = &id;
5272  wi.pset = &visited;
5273  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5274
5275  copy = gimple_seq_copy (seq);
5276
5277  /* Walk the copy, remapping decls.  */
5278  memset (&wi, 0, sizeof (wi));
5279  wi.info = &id;
5280  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5281
5282  /* Clean up.  */
5283  delete id.decl_map;
5284  if (id.debug_map)
5285    delete id.debug_map;
5286  if (id.dependence_map)
5287    {
5288      delete id.dependence_map;
5289      id.dependence_map = NULL;
5290    }
5291
5292  return copy;
5293}
5294
5295
5296/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
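/* For example, from within gdb (a sketch):

	(gdb) call debug_find_tree (top, search)

   evaluates to true iff SEARCH occurs somewhere below TOP.  */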
5297
5298static tree
5299debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5300{
5301  if (*tp == data)
5302    return (tree) data;
5303  else
5304    return NULL;
5305}
5306
5307DEBUG_FUNCTION bool
5308debug_find_tree (tree top, tree search)
5309{
5310  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5311}
5312
5313
5314/* Declare the variables created by the inliner.  Add all the variables in
5315   VARS to BLOCK.  */
5316
5317static void
5318declare_inline_vars (tree block, tree vars)
5319{
5320  tree t;
5321  for (t = vars; t; t = DECL_CHAIN (t))
5322    {
5323      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5324      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5325      add_local_decl (cfun, t);
5326    }
5327
5328  if (block)
5329    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5330}
5331
5332/* Finish up COPY, the copy of DECL.  DECL originally lived in ID->src_fn,
5333   but the copy will live in ID->dst_fn.  This is the common tail of the
5334   copy_decl_* routines below.  */
5335
5336static tree
5337copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5338{
5339  /* Don't generate debug information for the copy if we wouldn't have
5340     generated it for the original either.  */
5341  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5342  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5343
5344  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5345     declaration inspired this copy.  */
5346  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5347
5348  /* The new variable/label has no RTL, yet.  */
5349  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5350      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5351    SET_DECL_RTL (copy, 0);
5352
5353  /* These args would always appear unused, if not for this.  */
5354  TREE_USED (copy) = 1;
5355
5356  /* Set the context for the new declaration.  */
5357  if (!DECL_CONTEXT (decl))
5358    /* Globals stay global.  */
5359    ;
5360  else if (DECL_CONTEXT (decl) != id->src_fn)
5361    /* Things that weren't in the scope of the function we're inlining
5362       from aren't in the scope we're inlining to, either.  */
5363    ;
5364  else if (TREE_STATIC (decl))
5365    /* Function-scoped static variables should stay in the original
5366       function.  */
5367    ;
5368  else
5369    /* Ordinary automatic local variables are now in the scope of the
5370       new function.  */
5371    DECL_CONTEXT (copy) = id->dst_fn;
5372
5373  return copy;
5374}
5375
5376static tree
5377copy_decl_to_var (tree decl, copy_body_data *id)
5378{
5379  tree copy, type;
5380
5381  gcc_assert (TREE_CODE (decl) == PARM_DECL
5382	      || TREE_CODE (decl) == RESULT_DECL);
5383
5384  type = TREE_TYPE (decl);
5385
5386  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5387		     VAR_DECL, DECL_NAME (decl), type);
5388  if (DECL_PT_UID_SET_P (decl))
5389    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5390  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5391  TREE_READONLY (copy) = TREE_READONLY (decl);
5392  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5393  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5394
5395  return copy_decl_for_dup_finish (id, decl, copy);
5396}
5397
5398/* Like copy_decl_to_var, but create a return slot object instead of a
5399   pointer variable for return by invisible reference.  */
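/* E.g. for a callee returning 'struct big' by invisible reference, the
   RESULT_DECL has pointer type 'struct big *'; the variable created here
   has type 'struct big' itself and acts as the return slot (an
   illustrative sketch of the DECL_BY_REFERENCE case handled below).  */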
5400
5401static tree
5402copy_result_decl_to_var (tree decl, copy_body_data *id)
5403{
5404  tree copy, type;
5405
5406  gcc_assert (TREE_CODE (decl) == PARM_DECL
5407	      || TREE_CODE (decl) == RESULT_DECL);
5408
5409  type = TREE_TYPE (decl);
5410  if (DECL_BY_REFERENCE (decl))
5411    type = TREE_TYPE (type);
5412
5413  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5414		     VAR_DECL, DECL_NAME (decl), type);
5415  if (DECL_PT_UID_SET_P (decl))
5416    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5417  TREE_READONLY (copy) = TREE_READONLY (decl);
5418  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5419  if (!DECL_BY_REFERENCE (decl))
5420    {
5421      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5422      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5423    }
5424
5425  return copy_decl_for_dup_finish (id, decl, copy);
5426}
5427
5428tree
5429copy_decl_no_change (tree decl, copy_body_data *id)
5430{
5431  tree copy;
5432
5433  copy = copy_node (decl);
5434
5435  /* The COPY is not abstract; it will be generated in DST_FN.  */
5436  DECL_ABSTRACT_P (copy) = false;
5437  lang_hooks.dup_lang_specific_decl (copy);
5438
5439  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5440     been taken; it's for internal bookkeeping in expand_goto_internal.  */
5441  if (TREE_CODE (copy) == LABEL_DECL)
5442    {
5443      TREE_ADDRESSABLE (copy) = 0;
5444      LABEL_DECL_UID (copy) = -1;
5445    }
5446
5447  return copy_decl_for_dup_finish (id, decl, copy);
5448}
5449
5450static tree
5451copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5452{
5453  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5454    return copy_decl_to_var (decl, id);
5455  else
5456    return copy_decl_no_change (decl, id);
5457}
5458
5459/* Return a copy of the function's argument tree.  */
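/* For instance (a sketch), when versioning 'int f (int a, int b, int c)'
   with bit 1 set in ARGS_TO_SKIP, the returned chain contains copies of A
   and C only; B is no longer a parameter of the clone, and if its value
   is still needed a substitute VAR_DECL is created instead (see the else
   branch below).  */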
5460static tree
5461copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5462			       bitmap args_to_skip, tree *vars)
5463{
5464  tree arg, *parg;
5465  tree new_parm = NULL;
5466  int i = 0;
5467
5468  parg = &new_parm;
5469
5470  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5471    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5472      {
5473        tree new_tree = remap_decl (arg, id);
5474	if (TREE_CODE (new_tree) != PARM_DECL)
5475	  new_tree = id->copy_decl (arg, id);
5476        lang_hooks.dup_lang_specific_decl (new_tree);
5477        *parg = new_tree;
5478	parg = &DECL_CHAIN (new_tree);
5479      }
5480    else if (!id->decl_map->get (arg))
5481      {
5482	/* Make an equivalent VAR_DECL.  If the argument was used
5483	   as a temporary variable later in the function, its uses will
5484	   be replaced by this local variable.  */
5485	tree var = copy_decl_to_var (arg, id);
5486	insert_decl_map (id, arg, var);
5487        /* Declare this new variable.  */
5488        DECL_CHAIN (var) = *vars;
5489        *vars = var;
5490      }
5491  return new_parm;
5492}
5493
5494/* Return a copy of the function's static chain.  */
5495static tree
5496copy_static_chain (tree static_chain, copy_body_data * id)
5497{
5498  tree *chain_copy, *pvar;
5499
5500  chain_copy = &static_chain;
5501  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5502    {
5503      tree new_tree = remap_decl (*pvar, id);
5504      lang_hooks.dup_lang_specific_decl (new_tree);
5505      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5506      *pvar = new_tree;
5507    }
5508  return static_chain;
5509}
5510
5511/* Return true if the function is allowed to be versioned.
5512   This is a guard for the versioning functionality.  */
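/* E.g. a function declared with __attribute__ ((noclone)) is not
   versionable, and neither is one that copy_forbidden refuses to
   duplicate for some other reason (see copy_forbidden for the exact set
   of restrictions).  */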
5513
5514bool
5515tree_versionable_function_p (tree fndecl)
5516{
5517  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5518	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5519}
5520
5521/* Delete all unreachable basic blocks and update the callgraph.
5522   Doing so is somewhat nontrivial because we need to update all clones and
5523   remove inline functions that become unreachable.  */
5524
5525static bool
5526delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5527{
5528  bool changed = false;
5529  basic_block b, next_bb;
5530
5531  find_unreachable_blocks ();
5532
5533  /* Delete all unreachable basic blocks.  */
5534
5535  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5536       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5537    {
5538      next_bb = b->next_bb;
5539
5540      if (!(b->flags & BB_REACHABLE))
5541	{
5542          gimple_stmt_iterator bsi;
5543
5544          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5545	    {
5546	      struct cgraph_edge *e;
5547	      struct cgraph_node *node;
5548
5549	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5550
5551	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5552		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5553		{
5554		  if (!e->inline_failed)
5555		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5556		  else
5557		    e->remove ();
5558		}
5559	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5560		  && id->dst_node->clones)
5561		for (node = id->dst_node->clones; node != id->dst_node;)
5562		  {
5563		    node->remove_stmt_references (gsi_stmt (bsi));
5564		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5565			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5566		      {
5567			if (!e->inline_failed)
5568			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5569			else
5570			  e->remove ();
5571		      }
5572
5573		    if (node->clones)
5574		      node = node->clones;
5575		    else if (node->next_sibling_clone)
5576		      node = node->next_sibling_clone;
5577		    else
5578		      {
5579			while (node != id->dst_node && !node->next_sibling_clone)
5580			  node = node->clone_of;
5581			if (node != id->dst_node)
5582			  node = node->next_sibling_clone;
5583		      }
5584		  }
5585	    }
5586	  delete_basic_block (b);
5587	  changed = true;
5588	}
5589    }
5590
5591  return changed;
5592}
5593
5594/* Update clone info after duplication.  */
5595
5596static void
5597update_clone_info (copy_body_data * id)
5598{
5599  struct cgraph_node *node;
5600  if (!id->dst_node->clones)
5601    return;
5602  for (node = id->dst_node->clones; node != id->dst_node;)
5603    {
5604      /* First update replace maps to match the new body.  */
5605      if (node->clone.tree_map)
5606        {
5607	  unsigned int i;
5608          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5609	    {
5610	      struct ipa_replace_map *replace_info;
5611	      replace_info = (*node->clone.tree_map)[i];
5612	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5613	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5614	    }
5615	}
5616      if (node->clones)
5617	node = node->clones;
5618      else if (node->next_sibling_clone)
5619	node = node->next_sibling_clone;
5620      else
5621	{
5622	  while (node != id->dst_node && !node->next_sibling_clone)
5623	    node = node->clone_of;
5624	  if (node != id->dst_node)
5625	    node = node->next_sibling_clone;
5626	}
5627    }
5628}
5629
5630/* Create a copy of a function's tree.
5631   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5632   of the original function and the new copied function
5633   respectively.  In case we want to replace a DECL
5634   tree with another tree while duplicating the function's
5635   body, TREE_MAP represents the mapping between these
5636   trees. If UPDATE_CLONES is set, the call_stmt fields
5637   of edges of clones of the function will be updated.
5638
5639   If non-NULL, ARGS_TO_SKIP determines which function parameters to
5640   remove from the new version.
5641   If SKIP_RETURN is true, the new version will return void.
5642   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5643   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
5645void
5646tree_function_versioning (tree old_decl, tree new_decl,
5647			  vec<ipa_replace_map *, va_gc> *tree_map,
5648			  bool update_clones, bitmap args_to_skip,
5649			  bool skip_return, bitmap blocks_to_copy,
5650			  basic_block new_entry)
5651{
5652  struct cgraph_node *old_version_node;
5653  struct cgraph_node *new_version_node;
5654  copy_body_data id;
5655  tree p;
5656  unsigned i;
5657  struct ipa_replace_map *replace_info;
5658  basic_block old_entry_block, bb;
5659  auto_vec<gimple, 10> init_stmts;
5660  tree vars = NULL_TREE;
5661
5662  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5663	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5664  DECL_POSSIBLY_INLINED (old_decl) = 1;
5665
5666  old_version_node = cgraph_node::get (old_decl);
5667  gcc_checking_assert (old_version_node);
5668  new_version_node = cgraph_node::get (new_decl);
5669  gcc_checking_assert (new_version_node);
5670
5671  /* Copy over debug args.  */
5672  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5673    {
5674      vec<tree, va_gc> **new_debug_args, **old_debug_args;
5675      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5676      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5677      old_debug_args = decl_debug_args_lookup (old_decl);
5678      if (old_debug_args)
5679	{
5680	  new_debug_args = decl_debug_args_insert (new_decl);
5681	  *new_debug_args = vec_safe_copy (*old_debug_args);
5682	}
5683    }
5684
5685  /* Output the inlining info for this abstract function, since it has been
5686     inlined.  If we don't do this now, we can lose the information about the
5687     variables in the function when the blocks get blown away as soon as we
5688     remove the cgraph node.  */
5689  (*debug_hooks->outlining_inline_function) (old_decl);
5690
5691  DECL_ARTIFICIAL (new_decl) = 1;
5692  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5693  if (DECL_ORIGIN (old_decl) == old_decl)
5694    old_version_node->used_as_abstract_origin = true;
5695  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5696
5697  /* Prepare the data structures for the tree copy.  */
5698  memset (&id, 0, sizeof (id));
5699
5700  /* Statements in the copied body may need folding afterwards.  */
5701  id.statements_to_fold = new hash_set<gimple>;
5702
5703  id.decl_map = new hash_map<tree, tree>;
5704  id.debug_map = NULL;
5705  id.src_fn = old_decl;
5706  id.dst_fn = new_decl;
5707  id.src_node = old_version_node;
5708  id.dst_node = new_version_node;
5709  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5710  id.blocks_to_copy = blocks_to_copy;
5711
5712  id.copy_decl = copy_decl_no_change;
5713  id.transform_call_graph_edges
5714    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5715  id.transform_new_cfg = true;
5716  id.transform_return_to_modify = false;
5717  id.transform_parameter = false;
5718  id.transform_lang_insert_block = NULL;
5719
5720  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5721    (DECL_STRUCT_FUNCTION (old_decl));
5722  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5723  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5724  initialize_cfun (new_decl, old_decl,
5725		   old_entry_block->count);
5726  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5727    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5728      = id.src_cfun->gimple_df->ipa_pta;
5729
5730  /* Copy the function's static chain.  */
5731  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5732  if (p)
5733    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5734      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5735			 &id);
5736
5737  /* If there's a tree_map, prepare for substitution.  */
5738  if (tree_map)
5739    for (i = 0; i < tree_map->length (); i++)
5740      {
5741	gimple init;
5742	replace_info = (*tree_map)[i];
5743	if (replace_info->replace_p)
5744	  {
5745	    if (!replace_info->old_tree)
5746	      {
5747		int i = replace_info->parm_num;
5748		tree parm;
5749		tree req_type;
5750
5751		for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5752		  i --;
5753		replace_info->old_tree = parm;
5754		req_type = TREE_TYPE (parm);
5755		if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5756		  {
5757		    if (fold_convertible_p (req_type, replace_info->new_tree))
5758		      replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5759		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5760		      replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5761		    else
5762		      {
5763			if (dump_file)
5764			  {
5765			    fprintf (dump_file, "    const ");
5766			    print_generic_expr (dump_file, replace_info->new_tree, 0);
5767			    fprintf (dump_file, "  can't be converted to param ");
5768			    print_generic_expr (dump_file, parm, 0);
5769			    fprintf (dump_file, "\n");
5770			  }
5771			replace_info->old_tree = NULL;
5772		      }
5773		  }
5774	      }
5775	    else
5776	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5777	    if (replace_info->old_tree)
5778	      {
5779		init = setup_one_parameter (&id, replace_info->old_tree,
5780					    replace_info->new_tree, id.src_fn,
5781					    NULL,
5782					    &vars);
5783		if (init)
5784		  init_stmts.safe_push (init);
5785	      }
5786	  }
5787      }
5788  /* Copy the function's arguments.  */
5789  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5790    DECL_ARGUMENTS (new_decl) =
5791      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5792      				     args_to_skip, &vars);
5793
5794  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5795  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5796
5797  declare_inline_vars (DECL_INITIAL (new_decl), vars);
5798
5799  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5800    /* Add local vars.  */
5801    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5802
5803  if (DECL_RESULT (old_decl) == NULL_TREE)
5804    ;
5805  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5806    {
5807      DECL_RESULT (new_decl)
5808	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5809		      RESULT_DECL, NULL_TREE, void_type_node);
5810      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5811      cfun->returns_struct = 0;
5812      cfun->returns_pcc_struct = 0;
5813    }
5814  else
5815    {
5816      tree old_name;
5817      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5818      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5819      if (gimple_in_ssa_p (id.src_cfun)
5820	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5821	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5822	{
5823	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5824	  insert_decl_map (&id, old_name, new_name);
5825	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5826	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5827	}
5828    }
5829
5830  /* Set up the destination function's loop tree.  */
5831  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5832    {
5833      cfun->curr_properties &= ~PROP_loops;
5834      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5835      cfun->curr_properties |= PROP_loops;
5836    }
5837
5838  /* Copy the function's body.  */
5839  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5840	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5841	     new_entry);
5842
5843  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5844  number_blocks (new_decl);
5845
5846  /* We want to create the BB unconditionally, so that the addition of
5847     debug stmts doesn't affect BB count, which may in the end cause
5848     codegen differences.  */
5849  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5850  while (init_stmts.length ())
5851    insert_init_stmt (&id, bb, init_stmts.pop ());
5852  update_clone_info (&id);
5853
5854  /* Remap the nonlocal_goto_save_area, if any.  */
5855  if (cfun->nonlocal_goto_save_area)
5856    {
5857      struct walk_stmt_info wi;
5858
5859      memset (&wi, 0, sizeof (wi));
5860      wi.info = &id;
5861      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5862    }
5863
5864  /* Clean up.  */
5865  delete id.decl_map;
5866  if (id.debug_map)
5867    delete id.debug_map;
5868  free_dominance_info (CDI_DOMINATORS);
5869  free_dominance_info (CDI_POST_DOMINATORS);
5870
5871  fold_marked_statements (0, id.statements_to_fold);
5872  delete id.statements_to_fold;
5873  fold_cond_expr_cond ();
5874  delete_unreachable_blocks_update_callgraph (&id);
5875  if (id.dst_node->definition)
5876    cgraph_edge::rebuild_references ();
5877  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5878    {
5879      calculate_dominance_info (CDI_DOMINATORS);
5880      fix_loop_structure (NULL);
5881    }
5882  update_ssa (TODO_update_ssa);
5883
5884  /* After partial cloning we need to rescale frequencies, so that they
5885     are within the proper range in the cloned function.  */
5886  if (new_entry)
5887    {
5888      struct cgraph_edge *e;
5889      rebuild_frequencies ();
5890
5891      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5892      for (e = new_version_node->callees; e; e = e->next_callee)
5893	{
5894	  basic_block bb = gimple_bb (e->call_stmt);
5895	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5896							 bb);
5897	  e->count = bb->count;
5898	}
5899      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5900	{
5901	  basic_block bb = gimple_bb (e->call_stmt);
5902	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5903							 bb);
5904	  e->count = bb->count;
5905	}
5906    }
5907
5908  free_dominance_info (CDI_DOMINATORS);
5909  free_dominance_info (CDI_POST_DOMINATORS);
5910
5911  gcc_assert (!id.debug_stmts.exists ());
5912  pop_cfun ();
5913  return;
5914}
5915
5916/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5917   the callee and return the inlined body on success.  */
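/* As an illustrative sketch, given

	static int twice (int i) __attribute__ ((const));
	static int twice (int i) { return 2 * i; }

   a GENERIC CALL_EXPR 'twice (a)' may be replaced by the remapped body
   '2 * a', provided the front end still has DECL_SAVED_TREE for 'twice'.  */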
5918
5919tree
5920maybe_inline_call_in_expr (tree exp)
5921{
5922  tree fn = get_callee_fndecl (exp);
5923
5924  /* We can only try to inline "const" functions.  */
5925  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5926    {
5927      call_expr_arg_iterator iter;
5928      copy_body_data id;
5929      tree param, arg, t;
5930      hash_map<tree, tree> decl_map;
5931
5932      /* Remap the parameters.  */
5933      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5934	   param;
5935	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5936	decl_map.put (param, arg);
5937
5938      memset (&id, 0, sizeof (id));
5939      id.src_fn = fn;
5940      id.dst_fn = current_function_decl;
5941      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5942      id.decl_map = &decl_map;
5943
5944      id.copy_decl = copy_decl_no_change;
5945      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5946      id.transform_new_cfg = false;
5947      id.transform_return_to_modify = true;
5948      id.transform_parameter = true;
5949      id.transform_lang_insert_block = NULL;
5950
5951      /* Make sure not to unshare trees behind the front-end's back
5952	 since front-end specific mechanisms may rely on sharing.  */
5953      id.regimplify = false;
5954      id.do_not_unshare = true;
5955
5956      /* We're not inside any EH region.  */
5957      id.eh_lp_nr = 0;
5958
5959      t = copy_tree_body (&id);
5960
5961      /* We can only return something suitable for use in a GENERIC
5962	 expression tree.  */
5963      if (TREE_CODE (t) == MODIFY_EXPR)
5964	return TREE_OPERAND (t, 1);
5965    }
5966
5967   return NULL_TREE;
5968}
5969
5970/* Duplicate a type, fields and all.  */
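/* Example use (a sketch):

	tree dup = build_duplicate_type (TREE_TYPE (decl));

   DUP is then a fresh type tree, fields included, distinct from the
   original and acting as its own TYPE_CANONICAL.  */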
5971
5972tree
5973build_duplicate_type (tree type)
5974{
5975  struct copy_body_data id;
5976
5977  memset (&id, 0, sizeof (id));
5978  id.src_fn = current_function_decl;
5979  id.dst_fn = current_function_decl;
5980  id.src_cfun = cfun;
5981  id.decl_map = new hash_map<tree, tree>;
5982  id.debug_map = NULL;
5983  id.copy_decl = copy_decl_no_change;
5984
5985  type = remap_type_1 (type, &id);
5986
5987  delete id.decl_map;
5988  if (id.debug_map)
5989    delete id.debug_map;
5990
5991  TYPE_CANONICAL (type) = type;
5992
5993  return type;
5994}
5995
5996/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
5997   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
5998   evaluation.  */
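/* Example use (a sketch mirroring the C++ constexpr evaluator):

	tree parms, result;
	tree body = copy_fn (fndecl, parms, result);

   PARMS then chains fresh copies of FNDECL's PARM_DECLs (the caller binds
   argument values to those), RESULT is the remapped RESULT_DECL or
   NULL_TREE, and BODY is an unshared copy of DECL_SAVED_TREE (fndecl).  */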
5999
6000tree
6001copy_fn (tree fn, tree& parms, tree& result)
6002{
6003  copy_body_data id;
6004  tree param;
6005  hash_map<tree, tree> decl_map;
6006
6007  tree *p = &parms;
6008  *p = NULL_TREE;
6009
6010  memset (&id, 0, sizeof (id));
6011  id.src_fn = fn;
6012  id.dst_fn = current_function_decl;
6013  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6014  id.decl_map = &decl_map;
6015
6016  id.copy_decl = copy_decl_no_change;
6017  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6018  id.transform_new_cfg = false;
6019  id.transform_return_to_modify = false;
6020  id.transform_parameter = true;
6021  id.transform_lang_insert_block = NULL;
6022
6023  /* Make sure not to unshare trees behind the front-end's back
6024     since front-end specific mechanisms may rely on sharing.  */
6025  id.regimplify = false;
6026  id.do_not_unshare = true;
6027
6028  /* We're not inside any EH region.  */
6029  id.eh_lp_nr = 0;
6030
6031  /* Remap the parameters and result and return them to the caller.  */
6032  for (param = DECL_ARGUMENTS (fn);
6033       param;
6034       param = DECL_CHAIN (param))
6035    {
6036      *p = remap_decl (param, &id);
6037      p = &DECL_CHAIN (*p);
6038    }
6039
6040  if (DECL_RESULT (fn))
6041    result = remap_decl (DECL_RESULT (fn), &id);
6042  else
6043    result = NULL_TREE;
6044
6045  return copy_tree_body (&id);
6046}
6047