1/* Pass computing data for optimizing stdarg functions.
2   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3   Contributed by Jakub Jelinek <jakub@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to
19the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20Boston, MA 02110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "function.h"
28#include "langhooks.h"
29#include "diagnostic.h"
30#include "target.h"
31#include "tree-flow.h"
32#include "tree-pass.h"
33#include "tree-stdarg.h"
34
35/* A simple pass that attempts to optimize stdarg functions on architectures
36   that need to save register arguments to stack on entry to stdarg functions.
37   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
41   in the function, floating point registers don't need to be saved, etc.  */
42
43
44/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
45   is executed at most as many times as VA_START_BB.  */
46
47static bool
48reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
49{
50  edge *stack, e;
51  edge_iterator ei;
52  int sp;
53  sbitmap visited;
54  bool ret;
55
56  if (va_arg_bb == va_start_bb)
57    return true;
58
59  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
60    return false;
61
62  stack = XNEWVEC (edge, n_basic_blocks + 1);
63  sp = 0;
64
65  visited = sbitmap_alloc (last_basic_block);
66  sbitmap_zero (visited);
67  ret = true;
68
69  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
70    stack[sp++] = e;
71
72  while (sp)
73    {
74      basic_block src;
75
76      --sp;
77      e = stack[sp];
78      src = e->src;
79
80      if (e->flags & EDGE_COMPLEX)
81	{
82	  ret = false;
83	  break;
84	}
85
86      if (src == va_start_bb)
87	continue;
88
89      /* va_arg_bb can be executed more times than va_start_bb.  */
90      if (src == va_arg_bb)
91	{
92	  ret = false;
93	  break;
94	}
95
96      gcc_assert (src != ENTRY_BLOCK_PTR);
97
98      if (! TEST_BIT (visited, src->index))
99	{
100	  SET_BIT (visited, src->index);
101	  FOR_EACH_EDGE (e, ei, src->preds)
102	    stack[sp++] = e;
103	}
104    }
105
106  free (stack);
107  sbitmap_free (visited);
108  return ret;
109}
110
111
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree stmt, lhs, orig_lhs;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks entries
     that have not been computed yet.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA use-def chain backwards from RHS,
     accumulating into RET the constants added along the way, until we
     either find a read of COUNTER itself or hit an SSA name whose
     offset is already cached.  */
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached offset available; adjust RET by how far the counter
	     has moved since the cached value, saturating at MAX_SIZE.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* Only plain assignments defining LHS can be analyzed.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
	  || TREE_OPERAND (stmt, 0) != lhs)
	return (unsigned HOST_WIDE_INT) -1;

      rhs = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      /* Look through plain copies.  */
      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      /* Look through casts of SSA names.  */
      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* Accumulate additions of non-negative host-representable
	 constants.  */
      if (TREE_CODE (rhs) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* Anything else must be a read of COUNTER itself, otherwise the
	 bump cannot be determined.  */
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* For a field counter, the same field of the same va_list
	     object must be read.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return (unsigned HOST_WIDE_INT) -1;
	}
      else if (counter != rhs)
	return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  /* Second pass: walk the same chain again and cache for each
     intermediate SSA name the counter offset it corresponds to, so
     later queries can stop at the cached entry.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Offsets are stored saturated at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* Undo the additions as we move back towards the counter read.  */
      if (TREE_CODE (rhs) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
247
248
249/* Called by walk_tree to look for references to va_list variables.  */
250
251static tree
252find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
253			void *data)
254{
255  bitmap va_list_vars = (bitmap) data;
256  tree var = *tp;
257
258  if (TREE_CODE (var) == SSA_NAME)
259    var = SSA_NAME_VAR (var);
260
261  if (TREE_CODE (var) == VAR_DECL
262      && bitmap_bit_p (va_list_vars, DECL_UID (var)))
263    return var;
264
265  return NULL_TREE;
266}
267
268
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily determine once per basic block (compute_sizes starts out as
     -1) whether SI->bb executes at most once per va_start; only then
     can counter bumps be accumulated precisely.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* A counter write whose bump could be determined grows the needed
     save area.  The "+ 1 > 1" test rejects both 0 (no bump) and the
     (unsigned HOST_WIDE_INT) -1 failure value (which wraps to 0).  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: an unanalyzable write, or any write in
     a block that may run multiple times, forces saving all registers
     of the relevant kind.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
320
321
322/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
323   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
324   is false, AP has been seen in VAR = AP assignment.
325   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
326   va_arg operation that doesn't cause the va_list variable to escape
327   current function.  */
328
329static bool
330va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
331			   bool write_p)
332{
333  tree base;
334
335  if (TREE_CODE (ap) != COMPONENT_REF
336      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
337    return false;
338
339  if (TREE_CODE (var) != SSA_NAME
340      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
341    return false;
342
343  base = get_base_address (ap);
344  if (TREE_CODE (base) != VAR_DECL
345      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
346    return false;
347
348  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
349    va_list_counter_op (si, ap, var, true, write_p);
350  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
351    va_list_counter_op (si, ap, var, false, write_p);
352
353  return true;
354}
355
356
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list variables.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  /* TEM must be a function-local SSA temporary that is not itself a
     va_list variable.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
		       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  /* Lazily determine once per basic block whether this block runs at
     most once per va_start; see va_list_counter_op.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* The value read must be an analyzable bump of AP.  */
  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
		  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}
402
403
404/* Check for:
405     tem1 = AP;
406     TEM2 = tem1 + CST;
407     AP = TEM2;
408   sequence and update cfun->va_list_gpr_size.  Return true if found.  */
409
410static bool
411va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
412{
413  unsigned HOST_WIDE_INT increment;
414
415  if (TREE_CODE (ap) != VAR_DECL
416      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
417    return false;
418
419  if (TREE_CODE (tem2) != SSA_NAME
420      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
421    return false;
422
423  if (si->compute_sizes <= 0)
424    return false;
425
426  increment = va_list_counter_bump (si, ap, tem2, true);
427  if (increment + 1 <= 1)
428    return false;
429
430  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
431    cfun->va_list_gpr_size += increment;
432  else
433    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
434
435  return true;
436}
437
438
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued assignments can propagate a va_list value.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Strip a constant addition or a cast to find the copied pointer.  */
  if ((TREE_CODE (rhs) == PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
      || TREE_CODE (rhs) == NOP_EXPR
      || TREE_CODE (rhs) == CONVERT_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Nothing to do unless one of the tracked temporaries is copied.  */
  if (TREE_CODE (rhs) != SSA_NAME
      || ! bitmap_bit_p (si->va_list_escape_vars,
			 DECL_UID (SSA_NAME_VAR (rhs))))
    return;

  /* A copy into anything but a function-local SSA temporary escapes.  */
  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily determine once per basic block whether this block runs at
     most once per va_start; see va_list_counter_op.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS must hold an analyzable offset from the va_start'ed value.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Track the new temporary's uses as well.  */
  bitmap_set_bit (si->va_list_escape_vars,
		  DECL_UID (SSA_NAME_VAR (lhs)));
}
500
501
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i), use;
	  ssa_op_iter iter;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  DECL_UID (SSA_NAME_VAR (use))))
		continue;

	      /* A use of a tracked temporary: only the statement forms
		 produced by a gimplified va_arg are acceptable.  */
	      if (TREE_CODE (stmt) == MODIFY_EXPR)
		{
		  tree lhs = TREE_OPERAND (stmt, 0);
		  tree rhs = TREE_OPERAND (stmt, 1);

		  if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  /* x = *ap_temp;  */
		  if (TREE_CODE (rhs) == INDIRECT_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      /* Reading through the temporary consumes GPR save
			 area up to its offset plus the access size,
			 saturating at VA_LIST_MAX_GPR_SIZE.  */
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if ((TREE_CODE (rhs) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
		      || TREE_CODE (rhs) == NOP_EXPR
		      || TREE_CODE (rhs) == CONVERT_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  if (rhs == use)
		    {
		      /* Copies into another tracked temporary or back
			 into a tracked va_list variable are fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   DECL_UID (SSA_NAME_VAR (lhs))))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs)))
			continue;
		    }
		}

	      /* Any other use means the va_list value may escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
591
592
593/* Return true if this optimization pass should be done.
594   It makes only sense for stdarg functions.  */
595
596static bool
597gate_optimize_stdarg (void)
598{
599  /* This optimization is only for stdarg functions.  */
600  return current_function_stdarg != 0;
601}
602
603
/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  const char *funcname = NULL;

  /* Start optimistically from "no registers need saving" and grow the
     sizes as va_start/va_arg uses are discovered.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* A "simple" va_list is a plain pointer to void or char; otherwise
     it is an aggregate with target-defined counter fields.  */
  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
		       && (TREE_TYPE (va_list_type_node) == void_type_node
			   || TREE_TYPE (va_list_type_node) == char_type_node);
  gcc_assert (is_gimple_reg_type (va_list_type_node) == va_list_simple_ptr);

  /* First walk: find all va_start calls and collect the local va_list
     variables they initialize into SI.va_list_vars.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call = get_call_expr_in (stmt), callee;
	  tree ap;

	  if (!call)
	    continue;

	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_STDARG_START:
	    case BUILT_IN_ARGS_INFO:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = TREE_VALUE (TREE_OPERAND (call, 1));

	  /* The argument of va_start must be the address of a local
	     va_list variable (possibly element zero when va_list is an
	     array type); anything else defeats the analysis.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
	        {
	          va_list_escapes = true;
	          break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (va_list_type_node)
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* reachable_at_most_once needs dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Second walk: analyze every statement for va_list reads, writes and
     escapes, accumulating the register save area sizes.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree phi, lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	    {
	      lhs = PHI_RESULT (phi);

	      if (!is_gimple_reg (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes
		      || walk_tree (&phi, find_va_list_reference,
				    si.va_list_vars, NULL))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_generic_expr (dump_file, phi, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = bsi_start (bb);
	   !bsi_end_p (i) && !va_list_escapes;
	   bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call;

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  call = get_call_expr_in (stmt);
	  if (call)
	    {
	      tree callee = get_callee_fndecl (call);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (TREE_CODE (stmt) == MODIFY_EXPR)
	    {
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree rhs = TREE_OPERAND (stmt, 1);

	      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		rhs = TREE_OPERAND (rhs, 0);

	      if (va_list_simple_ptr)
		{
		  /* Check for tem = ap.  */
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;

		  /* Check for the last insn in:
		     tem1 = ap;
		     tem2 = tem1 + CST;
		     ap = tem2;
		     sequence.  */
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;

		  else
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  /* Check for ap[0].field = temp.  */
		  if (va_list_counter_struct_op (&si, lhs, rhs, true))
		    continue;

		  /* Check for temp = ap[0].field.  */
		  else if (va_list_counter_struct_op (&si, rhs, lhs, false))
		    continue;

		  /* Do any architecture specific checking.  */
		  else if (targetm.stdarg_optimize_hook
			   && targetm.stdarg_optimize_hook (&si, lhs, rhs))
		    continue;
		}
	    }

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_tree (&stmt, find_va_list_reference,
			    si.va_list_vars, NULL))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Finally verify that none of the temporaries holding va_list values
     escape the function.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On any failure, fall back to saving everything.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
908
909
/* Pass descriptor for the stdarg optimization pass.  */
struct tree_opt_pass pass_stdarg =
{
  "stdarg",				/* name */
  gate_optimize_stdarg,			/* gate */
  execute_optimize_stdarg,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
926