/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"

/* I'm not really happy about this, but we need to handle both GIMPLE
   and non-GIMPLE trees.  */
#include "tree-gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
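
/* For illustration (a hypothetical example, not taken from any
   testcase): inlining

       int inc (int x) { return x + 1; }

   at a call site `y = inc (3);' remaps the PARM_DECL `x' to a new
   VAR_DECL initialized from the argument, turns the RETURN_EXPR into
   a MODIFY_EXPR of a return variable, and substitutes that variable
   at the call site, roughly:

       int x.0;
       int retval.1;
       x.0 = 3;
       retval.1 = x.0 + 1;
       y = retval.1;

   The temporary names `x.0' and `retval.1' are invented here; the
   real names are whatever the decl-copying machinery produces.  */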

/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static tree copy_generic_body (copy_body_data *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static tree remap_decls (tree, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_no_change (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);

/* Insert a tree->tree mapping into ID's decl map.  Although the name
   suggests that the trees should be variables, the map is used for
   more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
		     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
		       (splay_tree_value) value);
}
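
/* A sketch of why the identity mapping above matters (the decls are
   hypothetical): after

       insert_decl_map (id, old_var, new_var);

   the map contains both old_var -> new_var and new_var -> new_var.
   If a later walk over already-remapped trees reaches new_var, the
   lookup finds the identity entry and yields new_var itself rather
   than manufacturing a copy of the copy.  */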

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* For fields, do likewise for the offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
	}

      return t;
    }

  return unshare_expr ((tree) n->value);
}
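
/* Hypothetical sketch of a remap where the size walk above matters:
   for a variable-length array in the source function,

       char buf[n];

   DECL_SIZE (buf) embeds a reference to `n'.  Copying `buf' alone is
   not enough; walking DECL_SIZE with copy_body_r rewrites that
   embedded reference so the copy's size uses the remapped `n' in the
   destination function.  */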

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					    TYPE_MODE (type),
					    TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new;
	    TREE_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

tree
remap_type (tree type, copy_body_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}
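
/* Example (illustrative, not from any testcase): given
   `void f (int n)' containing

       typedef int vec[n];

   the array type `int[n]' is variably modified with respect to f, so
   remap_type must build a fresh ARRAY_TYPE whose TYPE_DOMAIN bounds
   refer to the remapped `n'.  A type such as plain `int' is not
   variably modified and is simply entered into the map as itself.  */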

static tree
remap_decls (tree decls, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* Local static declarations must not be remapped; there is only
	 one decl for them, and duplicating it would break the one-decl
	 rule.  Link the original decl into the current function's
	 unexpanded_var_list instead.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
	  && !DECL_EXTERNAL (old_var))
	{
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
						 cfun->unexpanded_var_list);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
	;
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}
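
/* Illustrative case for the local-static handling above: inlining

       int counter (void) { static int n; return ++n; }

   must not duplicate `n'; every inlined copy has to update the one
   and only `n'.  So `n' is linked, unremapped, into the current
   function's unexpanded_var_list, while ordinary locals get fresh
   copies via remap_decl.  */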

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    lang_hooks.decls.insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (lang_hooks.tree_inlining.auto_var_in_fn_p
	      (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      value = (tree) n->value;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
		{
		  *tp = build_empty_stmt ();
		  return copy_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      tree new;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
	         does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
	      new = unshare_expr ((tree)n->value);
	      old = *tp;
	      *tp = fold_indirect_ref_1 (type, new);
	      if (! *tp)
	        {
		  if (TREE_CODE (new) == ADDR_EXPR)
		    *tp = TREE_OPERAND (new, 0);
	          else
		    {
	              *tp = build1 (INDIRECT_REF, type, new);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining, we want EXPRs without a block to appear in the
	 block of the function call.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
	{
	  new_block = id->block;
	  if (TREE_BLOCK (*tp))
	    {
	      splay_tree_node n;
	      n = splay_tree_lookup (id->decl_map,
				     (splay_tree_key) TREE_BLOCK (*tp));
	      gcc_assert (n);
	      new_block = (tree) n->value;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst
	    (NULL_TREE,
	     id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);
	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
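
/* A worked example (hypothetical) of the INDIRECT_REF folding above:
   inlining

       void set (int *const p) { *p = 1; }

   at `set (&x);' maps the read-only PARM_DECL `p' directly to the
   ADDR_EXPR `&x'.  When the walk reaches `*p', the decl map lookup
   yields `&x', and folding the INDIRECT_REF of that ADDR_EXPR
   collapses `*&x' to plain `x', so the inlined body becomes simply
   `x = 1;'.  */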

/* Copy a basic block, scaling the profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
				     * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* The RETURN_EXPR might have been removed; this is signalled by
	 the stmt pointer becoming NULL.  */
      if (stmt)
	{
	  tree call, decl;

	  /* With return slot optimization we can end up with
	     non-gimple (foo *)&this->m, fix that here.  */
	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
	      && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0)))
	    gimplify_stmt (&stmt);

          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
	  call = get_call_expr_in (stmt);
	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (call && (decl = get_callee_fndecl (call)))
	    {
	      struct cgraph_node *node;
	      struct cgraph_edge *edge;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = cgraph_edge (id->src_node, orig_stmt);
		  if (edge)
		    cgraph_clone_edge (edge, id->dst_node, stmt,
				       REG_BR_PROB_BASE, 1, true);
		  break;

		case CB_CGE_MOVE_CLONES:
		  for (node = id->dst_node->next_clone;
		       node;
		       node = node->next_clone)
		    {
		      edge = cgraph_edge (node, orig_stmt);
		      gcc_assert (edge);
		      cgraph_set_call_stmt (edge, stmt);
		    }
		  /* FALLTHRU */

		case CB_CGE_MOVE:
		  edge = cgraph_edge (id->dst_node, orig_stmt);
		  if (edge)
		    cgraph_set_call_stmt (edge, stmt);
		  break;

		default:
		  gcc_unreachable ();
		}
	    }
	  /* If you think we can abort here, you are wrong.
	     There is no region 0 in tree land.  */
	  gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
		      != 0);

	  if (tree_could_throw_p (stmt))
	    {
	      int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
	      /* Add an entry for the copied tree in the EH hashtable.
		 When cloning or versioning, use the hashtable in
		 cfun, and just copy the EH number.  When inlining, use the
		 hashtable in the caller, and adjust the region number.  */
	      if (region > 0)
		add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

	      /* If this tree doesn't have a region associated with it,
		 and there is a "current region,"
		 then associate this tree with the current region
		 and add edges associated with this region.  */
	      if ((lookup_stmt_eh_region_fn (id->src_cfun,
					     orig_stmt) <= 0
		   && id->eh_region > 0)
		  && tree_could_throw_p (stmt))
		add_stmt_to_eh_region (stmt, id->eh_region);
	    }
	}
    }
  return copy_basic_block;
}
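
/* Worked numbers for the profile scaling above (REG_BR_PROB_BASE is
   10000; the counts themselves are invented for illustration): if the
   callee's entry block has count 1000 and the call site has count
   250, then count_scale = 10000 * 250 / 1000 = 2500, and a callee
   block with count 400 is copied with count
   400 * 2500 / 10000 = 100.  */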

/* Copy edges from BB into its copy constructed earlier, scaling the
   profile accordingly.  Assume the aux pointers point to the copies
   of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new;

	flags = old_edge->flags;

	/* Return edges get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
	  flags |= EDGE_FALLTHRU;
	new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
	new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
	{
	  if (!bsi_end_p (bsi))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);
	      new_bb = e->dest;
	      bsi = bsi_start (new_bb);
	    }

           make_eh_edges (copy_stmt);
	}
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
	       basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Copy, built by this function.  */
  struct function *new_cfun;
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
		       /
		       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  id->src_cfun = cfun_to_copy;

  /* If requested, create new basic_block_info and label_to_block_maps.
     Otherwise, insert our new blocks and labels into the existing cfg.  */
  if (id->transform_new_cfg)
    {
      new_cfun =
	(struct function *) ggc_alloc_cleared (sizeof (struct function));
      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
      new_cfun->cfg = NULL;
      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
      new_cfun->ib_boundaries_block = NULL;
      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
      push_cfun (new_cfun);
      init_empty_tree_cfg ();

      ENTRY_BLOCK_PTR->count =
	(ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
	 REG_BR_PROB_BASE);
      ENTRY_BLOCK_PTR->frequency =
	(ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
	 frequency_scale / REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->count =
	(EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
	 REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->frequency =
	(EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
	 frequency_scale / REG_BR_PROB_BASE);

      entry_block_map = ENTRY_BLOCK_PTR;
      exit_block_map = EXIT_BLOCK_PTR;
    }

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      if (id->transform_new_cfg)
        init_eh_for_function ();
      id->eh_region_offset
	= duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
				0, id->eh_region);
    }
  /* Use the aux pointers to map the original blocks to their copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    bb->aux = NULL;

  if (id->transform_new_cfg)
    pop_cfun ();

  return new_fndecl;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (copy_body_data *id)
{
  tree body;
  tree fndecl = id->src_fn;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency,
	   basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}

static void
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant-propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  return;
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
	return;

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
	 keep our trees in gimple form.  */
      init_stmt = build2 (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
	 cast of a gimple value, then we will need to gimplify INIT_STMTS
	 at the end.  Note that is_gimple_cast only checks the outer
	 tree code, not its operand.  Thus the explicit check that its
	 operand is a gimple value.  */
      if (!is_gimple_val (rhs)
	  && (!is_gimple_cast (rhs)
	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
	gimplify_stmt (&init_stmt);

      /* If VAR represents a zero-sized variable, it's possible that the
	 assignment statement may result in no gimple statements.  */
      if (init_stmt)
        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}
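
/* Illustrative example for the parameter setup above: inlining

       int sq (const int x) { return x * x; }

   at `sq (4)' can map the read-only, non-addressable PARM_DECL `x'
   directly to the constant 4, producing `4 * 4' with no new VAR_DECL
   or initialization statement at all.  When the argument is not an
   invariant, say `sq (a + b)', a VAR_DECL copy of `x' is created and
   initialized with `x.0 = a + b;' instead (the name `x.0' is made
   up).  */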

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (copy_body_data *id, tree args, tree static_chain,
			       tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot_addr,
			 tree modify_dest, tree *use_p)
{
  tree callee = id->src_fn;
  tree caller = id->dst_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	var = return_slot_addr;
      else
	var = build_fold_indirect_ref (return_slot_addr);
      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	  && !DECL_COMPLEX_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		   && !DECL_COMPLEX_GIMPLE_REG_P (result)
		   && DECL_COMPLEX_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
		 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    var = build_fold_addr_expr (var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
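
/* Sketch of the three outcomes above (hypothetical call `s = f ();'
   where f returns a struct):
     - with a return slot address, the return value lives in the
       caller-provided object and no new variable is declared;
     - when MODIFY_DEST `s' is a non-addressable local whose type
       matches, `s' itself stands in for the RESULT_DECL;
     - otherwise a fresh VAR_DECL is declared in the caller and a use
       of it replaces the original call on the right-hand side.  */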

/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;

static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user explicitly forced
	 it, as this may change the program's memory overhead drastically
	 when the function using alloca is called in a loop.  In the GCC
	 present in SPEC2000, inlining into schedule_block caused it to
	 require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
      t = get_callee_fndecl (node);
      if (! t)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  return node;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_STDARG_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    return node;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    return node;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    return node;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    return node;

	  default:
	    break;
	  }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
	{
	  /* We cannot inline a function that receives a non-local goto
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.  */
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

	   void F (int i) { struct S { int ar[i]; } s; }

	 Attempting to do so produces a catch-22.
	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
	 UNION_TYPE nodes, then it goes into infinite recursion on a
	 structure containing a pointer to its own type.  If it doesn't,
	 then the type node for S doesn't get adjusted properly when
	 F is inlined.

	 ??? This is likely no longer true, but it's too late in the 4.0
	 cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
	if (variably_modified_type_p (TREE_TYPE (t), NULL))
	  {
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined "
		   "because it uses variable sized variables");
	    return node;
	  }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return subexpression representing possible alloca call, if any.  */
static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
				    inline_forbidden_p_1, fndecl);
	if (ret)
	  goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
	    here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.
	 We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
			 && DECL_INLINE (fn)
			 && DECL_DECLARED_INLINE_P (fn)
			 && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	sorry (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use the machine-dependent
   word size and take a possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
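
/* Worked example for the move-cost estimate (MOVE_MAX_PIECES and
   MOVE_RATIO are target macros; assume MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 3 purely for the arithmetic): a 16-byte struct costs
   (16 + 8 - 1) / 8 = 2, while a 32-byte struct exceeds 8 * 3 = 24
   bytes and is costed as a memcpy call, i.e. 4.  */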

/* Used by estimate_num_insns.  Estimate the number of instructions
   for a given statement.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = (int *) data;
  tree x = *tp;

  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count as nothing.  Their cost
     should be dominated by the operations that use them, which we count
     later, and they are a common target of CSE and similar
     optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
    case OMP_CLAUSE:
    case OMP_RETURN:
    case OMP_CONTINUE:
      break;

    /* We don't account for constants for now.  Assume that their cost
       is amortized by the operations that use them.  We may reconsider
       this decision once we are able to optimize the tree before
       estimating its size and break out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Try to estimate the cost of assignments.  We have three cases to
       deal with:
	1) Simple assignments to registers;
	2) Stores to things that must live in memory.  This includes
	   "normal" stores to scalars, but also assignments of large
	   structures, or constructors of big arrays;
	3) TARGET_EXPRs.

       Let us look at the first two cases, assuming we have "a = b + C":
       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
       If "a" is a GIMPLE register, the assignment to it is free on almost
       any target, because "a" usually ends up in a real register.  Hence
       the only cost of this expression comes from the PLUS_EXPR, and we
       can ignore the MODIFY_EXPR.
       If "a" is not a GIMPLE register, the assignment to "a" will most
       likely be a real store, so the cost of the MODIFY_EXPR is the cost
       of moving something into "a", which we compute using the function
       estimate_move_cost.

       The third case deals with TARGET_EXPRs, for which the semantics are
       that a temporary is assigned, unless the TARGET_EXPR itself is being
       assigned to something else.  In the latter case we do not need the
       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
       MODIFY_EXPR is free.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Is the right-hand side a TARGET_EXPR?  */
      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
	break;
      /* ... fall through ...  */

    case TARGET_EXPR:
      x = TREE_OPERAND (x, 0);
      /* Is this an assignment to a register?  */
      if (is_gimple_reg (x))
	break;
      /* Otherwise it's a store, so fall through to compute the move cost.  */

    case CONSTRUCTOR:
      *count += estimate_move_cost (TREE_TYPE (x));
      break;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case DOT_PROD_EXPR:

    case WIDEN_MULT_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining functions that contain too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;
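
    /* For instance, a statement like "a / b + a % b" is charged
       10 + 10 + 1 == 21 units by this walker, whereas "a + b - c"
       costs only 2.  */
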
    case CALL_EXPR:
      {
	tree decl = get_callee_fndecl (x);
	tree arg;

	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (decl))
	    {
	    case BUILT_IN_CONSTANT_P:
	      *walk_subtrees = 0;
	      return NULL_TREE;
	    case BUILT_IN_EXPECT:
	      return NULL_TREE;
	    default:
	      break;
	    }

	/* Our cost must be kept in sync with
	   cgraph_estimate_size_after_inlining, which uses the function
	   declaration to figure out the arguments.  */
	if (!decl)
	  {
	    for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
	  }
	else
	  {
	    for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (arg));
	  }

	*count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
	break;
      }
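
    /* So, for example, a call "g (x, y)" to a known function taking
       two int arguments is charged estimate_move_cost for each
       argument (typically 1 each) plus PARAM_INLINE_CALL_COST.  */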

    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SINGLE:
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_ATOMIC:
      /* OpenMP directives are generally very expensive.  */
      *count += 40;
      break;

    default:
      gcc_unreachable ();
    }
  return NULL;
}

/* Estimate number of instructions that will be created by expanding EXPR.  */

int
estimate_num_insns (tree expr)
{
  int num = 0;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  block_stmt_iterator bsi;
  struct function *my_function;

  /* If we're given an entire function, walk the CFG.  */
  if (TREE_CODE (expr) == FUNCTION_DECL)
    {
      my_function = DECL_STRUCT_FUNCTION (expr);
      gcc_assert (my_function && my_function->cfg);
      visited_nodes = pointer_set_create ();
      FOR_EACH_BB_FN (bb, my_function)
	{
	  for (bsi = bsi_start (bb);
	       !bsi_end_p (bsi);
	       bsi_next (&bsi))
	    {
	      walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
			 &num, visited_nodes);
	    }
	}
      pointer_set_destroy (visited_nodes);
    }
  else
    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);

  return num;
}
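
/* For example, the inlining heuristics elsewhere in the compiler
   compare this estimate against parameters; a sketch of such a use
   (PARAM_MAX_INLINE_INSNS_SINGLE is one of the existing limits):

     if (estimate_num_insns (fndecl)
	 > PARAM_VALUE (PARAM_MAX_INLINE_INSNS_SINGLE))
       the function is considered too large to inline.  */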

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);

/* Initialized with NOGC, making this poisonous to the garbage collector.  */
static VEC(function_p,heap) *cfun_stack;

void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  cfun = new_cfun;
}

void
pop_cfun (void)
{
  cfun = VEC_pop (function_p, cfun_stack);
}
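
/* A sketch of the intended usage pattern, SOME_FNDECL being whatever
   function is of interest:

     push_cfun (DECL_STRUCT_FUNCTION (some_fndecl));
     ... examine or modify data hanging off cfun ...
     pop_cfun ();

   Pushes and pops must nest, since the stack is global.  */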

/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
static void
add_lexical_block (tree current_block, tree new_block)
{
  tree *blk_p;

  /* Walk to the last sub-block.  */
  for (blk_p = &BLOCK_SUBBLOCKS (current_block);
       *blk_p;
       blk_p = &TREE_CHAIN (*blk_p))
    ;
  *blk_p = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
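
/* For example, given blocks B1 and B2 already chained under
   CURRENT_BLOCK, calling add_lexical_block (CURRENT_BLOCK, B3)
   yields the chain B1 -> B2 -> B3, with BLOCK_SUPERCONTEXT (B3)
   set to CURRENT_BLOCK.  */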

/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
{
  copy_body_data *id;
  tree t;
  tree use_retvar;
  tree fn;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  const char *reason;
  basic_block return_block;
  edge e;
  block_stmt_iterator bsi, stmt_bsi;
  bool successfully_inlined = FALSE;
  bool purge_dead_abnormal_edges;
  tree t_step;
  tree var;

  /* See what we've got.  */
  id = (copy_body_data *) data;
  t = *tp;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective-C and Fortran still call tree_rest_of_compilation
     directly.  Remove this check once that is fixed.  */
  if (!id->dst_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->dst_node, stmt);

  /* Constant propagation on arguments done during previous inlining
     may create new direct calls.  Produce an edge for such a call.  */
  if (!cg_edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have a missing edge in the callgraph.  This can happen
	 when previous inlining turned an indirect call into a direct
	 call by constant propagating arguments.  In all other cases
	 we hit a bug (incorrect node sharing is the most common
	 reason for missing edges).  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
      cgraph_create_edge (id->dst_node, dest, stmt,
			  bb->count, bb->loop_depth)->inline_failed
	= N_("originally indirect function call not considered for inlining");
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
	  /* Avoid warnings during early inline pass. */
	  && (!flag_unit_at_a_time || cgraph_global_info_ready))
	{
	  sorry ("inlining failed in call to %q+F: %s", fn, reason);
	  sorry ("called from here");
	}
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && strlen (reason)
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
	       /* Avoid warnings during early inline pass. */
	       && (!flag_unit_at_a_time || cgraph_global_info_ready))
	{
	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
		   fn, reason);
	  warning (OPT_Winline, "called from here");
	}
      goto egress;
    }
  fn = cg_edge->callee->decl;

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->dst_node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* We will be inlining this callee.  */
  id->eh_region = lookup_stmt_eh_region (stmt);

  /* Split the block holding the CALL_EXPR.  */
  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits after the statement; work around this by
     moving the call into the second block manually.  Not pretty,
     but seems easier than doing the CFG manipulation by hand
     when the CALL_EXPR is in the last statement of BB.  */
  stmt_bsi = bsi_last (bb);
  bsi_remove (&stmt_bsi, false);

  /* If the CALL_EXPR was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  bsi = bsi_start (return_block);
  if (bsi_end_p (bsi))
    {
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      purge_dead_abnormal_edges = true;
    }
  else
    {
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      purge_dead_abnormal_edges = false;
    }

  stmt_bsi = bsi_start (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  add_lexical_block (TREE_BLOCK (stmt), id->block);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
				 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_node = cg_edge->callee;

  initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);

  if (DECL_INITIAL (fn))
    add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */

  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the lhs to which the result of this call is assigned.  */
  return_slot_addr = NULL;
  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      modify_dest = TREE_OPERAND (stmt, 0);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;
      if (CALL_EXPR_RETURN_SLOT_OPT (t))
	{
	  return_slot_addr = build_fold_addr_expr (modify_dest);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
	  modify_dest = NULL;
	}
    }
  else
    modify_dest = NULL;

  /* Declare the return variable for the function.  */
  declare_return_variable (id, return_slot_addr,
			   modify_dest, &use_retvar);

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count, bb->frequency, bb, return_block);

  /* Add local vars in this inlined callee to caller.  */
  t_step = id->src_cfun->unexpanded_var_list;
  for (; t_step; t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
					       cfun->unexpanded_var_list);
      else
	cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
					       cfun->unexpanded_var_list);
    }

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* If the inlined function returns a result that we care about,
     clobber the CALL_EXPR with a reference to the return variable.  */
  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
    {
      *tp = use_retvar;
      maybe_clean_or_replace_eh_stmt (stmt, stmt);
    }
  else
    /* We're modifying a TSI owned by gimple_expand_calls_inline();
       tsi_delink() will leave the iterator in a sane state.  */
    bsi_remove (&stmt_bsi, true);

  if (purge_dead_abnormal_edges)
    tree_purge_dead_abnormal_call_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  id->block = NULL_TREE;
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}

/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We
   unfortunately cannot use that function here because we need a
   pointer to the CALL_EXPR, not the tree itself.  */
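
/* Concretely, the two statement shapes looked through below are

     foo (a);		a CALL_EXPR at the top level
     x = foo (a);	a CALL_EXPR on the RHS of a MODIFY_EXPR

   in each case possibly with a WITH_SIZE_EXPR wrapped around the
   call itself.  */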

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  block_stmt_iterator bsi;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree *expr_p = bsi_stmt_ptr (bsi);
      tree stmt = *expr_p;

      if (TREE_CODE (*expr_p) == MODIFY_EXPR)
	expr_p = &TREE_OPERAND (*expr_p, 1);
      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
	expr_p = &TREE_OPERAND (*expr_p, 0);
      if (TREE_CODE (*expr_p) == CALL_EXPR)
	if (expand_call_inline (bb, stmt, expr_p, id))
	  return true;
    }
  return false;
}

/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  tree prev_fn;
  basic_block bb;
  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node (fn);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.dst_fn = current_function_decl;
      prev_fn = current_function_decl;
    }

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_lang_insert_block = false;

  push_gimplify_context ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);
  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.dst_node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }
#endif
  /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
     as inlining loops might increase the maximum.  */
  if (ENTRY_BLOCK_PTR->count)
    counts_to_freqs ();
  fold_cond_expr_cond ();
}

/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  copy_body_data id;

  /* Clone the body, as if we were making an inline call.  But, remap the
     parameters in the callee to the parameters of caller.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = clone;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = (splay_tree)arg_map;
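  /* Note that ARG_MAP is used directly as the decl_map, so any
     bindings the caller pre-seeded into it (e.g. a PARM_DECL to its
     replacement) are applied while the body is copied.  */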

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = true;

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
}

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = TREE_CHAIN (*tp);
      tree new;

      /* Copy the node.  */
      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      *tp = new;

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
	  || code == TREE_LIST
	  || code == OMP_CLAUSE)
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new;

      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
					 CONSTRUCTOR_ELTS (*tp));
      *tp = new;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);
  return NULL_TREE;
}
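
/* A minimal usage sketch: to deep-copy an expression EXPR in place,
   one can let walk_tree drive this callback:

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   each node is then replaced by its copy as the walk reaches it; the
   unsaving machinery below uses copy_tree_r in just this way.  */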

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be
   mapped, use that one.  Otherwise, create a new node and enter it
   in ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  splay_tree st = (splay_tree) st_;
  splay_tree_node n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = (tree) n->value;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
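
/* For example, the first time a SAVE_EXPR S is encountered we enter
   both S -> S' and S' -> S' into ST; walking either S or its copy S'
   later therefore yields S', so a SAVE_EXPR shared between several
   subtrees is still shared in the copy.  */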

/* Called via walk_tree.  If *TP is a LABEL_EXPR for a local label,
   copies the declaration and enters it in the splay_tree in DATA
   (which is really a `copy_body_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
	break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}

/* Called via walk_tree when an expression is unsaved.  Using the
   decl_map splay_tree of the copy_body_data pointed to by DATA,
   remaps all local declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  splay_tree st = id->decl_map;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);

      /* If it's there, remap it.  */
      if (n)
	*tp = (tree) n->value;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  copy_body_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = false;

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);

  return expr;
}
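
/* For instance, to duplicate an expression that may have to be
   expanded more than once, one might write

     tree copy = unsave_expr_now (expr);

   COPY then carries fresh local labels and SAVE_EXPRs, so expanding
   it does not interfere with a later expansion of EXPR.  */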

/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}


/* Declare the variables created by the inliner.  Add all the variables
   in VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = TREE_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      cfun->unexpanded_var_list =
	tree_cons (NULL_TREE, t,
		   cfun->unexpanded_var_list);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}


/* Finish up COPY, a duplicate of DECL.  The original DECL was in
   ID->src_fn; the copy will be placed in ID->dst_fn.  Set up the
   debug information, RTL and declaration context of the copy.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  return copy;
}
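
/* For example, when inlining, a local "int tmp" declared in the
   callee ends up with DECL_CONTEXT set to the caller (ID->dst_fn),
   whereas a global or a function-scoped static referenced by the
   callee keeps its original context.  */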

static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}


static tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
{
  tree *arg_copy, *parg;

  arg_copy = &orig_parm;
  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = remap_decl (*parg, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }
  return orig_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*pvar);
      *pvar = new;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */
bool
tree_versionable_function_p (tree fndecl)
{
  if (fndecl == NULL_TREE)
    return false;
  /* ??? There are cases where a function is
     uninlinable but can be versioned.  */
  if (!tree_inlinable_function_p (fndecl))
    return false;

  return true;
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.  */
void
tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
			  bool update_clones)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p, new_fndecl;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  tree t_step;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  allocate_struct_function (new_decl);
  /* Cfun points to the newly allocated function struct at this point.  */
  cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

  /* Generate a new name for the new version.  */
  if (!update_clones)
    DECL_NAME (new_decl) = create_tmp_var_name (NULL);
  /* Create a new SYMBOL_REF rtx for the new name.  */
  if (DECL_RTL (old_decl) != NULL)
    {
      SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
      XEXP (DECL_RTL (new_decl), 0) =
	gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
			    IDENTIFIER_POINTER (DECL_NAME (new_decl)));
    }

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = false;

  current_function_decl = new_decl;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
			 &id);
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
      {
	replace_info = VARRAY_GENERIC_PTR (tree_map, i);
	if (replace_info->replace_p)
	  insert_decl_map (&id, replace_info->old_tree,
			   replace_info->new_tree);
      }

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.dst_fn);

  if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
	 t_step; t_step = TREE_CHAIN (t_step))
      {
	tree var = TREE_VALUE (t_step);
	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
						 cfun->unexpanded_var_list);
	else
	  cfun->unexpanded_var_list =
	    tree_cons (NULL_TREE, remap_decl (var, &id),
		       cfun->unexpanded_var_list);
      }

  /* Copy the function's body.  */
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  new_fndecl = copy_body (&id,
			  old_entry_block->count,
			  old_entry_block->frequency, NULL, NULL);

  DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);

  DECL_STRUCT_FUNCTION (new_decl)->cfg =
    DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
  DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
  DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
    DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
  DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
    DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  current_function_decl = NULL;
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
  fold_cond_expr_cond ();
  return;
}

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  type = remap_type_1 (type, &id);

  splay_tree_delete (id.decl_map);

  return type;
}
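
/* A usage sketch: given some RECORD_TYPE node T,

     tree t2 = build_duplicate_type (t);

   yields a distinct type with its own copies of the FIELD_DECLs,
   which can then be remapped independently of any particular
   function body.  */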