1/* Expands front end tree to back end RTL for GCC.
2   Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20/* This file handles the generation of rtl code from tree structure
21   at the level of the function as a whole.
22   It creates the rtl expressions for parameters and auto variables
23   and has full responsibility for allocating stack slots.
24
25   `expand_function_start' is called at the beginning of a function,
26   before the function body is parsed, and `expand_function_end' is
27   called after parsing the body.
28
29   Call `assign_stack_local' to allocate a stack slot for a local variable.
30   This is usually done during the RTL generation for the function body,
31   but it can also be done in the reload pass when a pseudo-register does
32   not get a hard register.  */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
37#include "tm.h"
38#include "rtl-error.h"
39#include "hash-set.h"
40#include "machmode.h"
41#include "vec.h"
42#include "double-int.h"
43#include "input.h"
44#include "alias.h"
45#include "symtab.h"
46#include "wide-int.h"
47#include "inchash.h"
48#include "tree.h"
49#include "fold-const.h"
50#include "stor-layout.h"
51#include "varasm.h"
52#include "stringpool.h"
53#include "flags.h"
54#include "except.h"
55#include "hashtab.h"
56#include "hard-reg-set.h"
57#include "function.h"
58#include "rtl.h"
59#include "statistics.h"
60#include "real.h"
61#include "fixed-value.h"
62#include "insn-config.h"
63#include "expmed.h"
64#include "dojump.h"
65#include "explow.h"
66#include "calls.h"
67#include "emit-rtl.h"
68#include "stmt.h"
69#include "expr.h"
70#include "insn-codes.h"
71#include "optabs.h"
72#include "libfuncs.h"
73#include "regs.h"
74#include "recog.h"
75#include "output.h"
76#include "tm_p.h"
77#include "langhooks.h"
78#include "target.h"
79#include "common/common-target.h"
80#include "gimple-expr.h"
81#include "gimplify.h"
82#include "tree-pass.h"
83#include "predict.h"
84#include "dominance.h"
85#include "cfg.h"
86#include "cfgrtl.h"
87#include "cfganal.h"
88#include "cfgbuild.h"
89#include "cfgcleanup.h"
90#include "basic-block.h"
91#include "df.h"
92#include "params.h"
93#include "bb-reorder.h"
94#include "shrink-wrap.h"
95#include "toplev.h"
96#include "rtl-iter.h"
97#include "tree-chkp.h"
98#include "rtl-chkp.h"
99
100/* So we can assign to cfun in this file.  */
101#undef cfun
102
103#ifndef STACK_ALIGNMENT_NEEDED
104#define STACK_ALIGNMENT_NEEDED 1
105#endif
106
107#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
108
109/* Round a value to the lowest integer less than it that is a multiple of
110   the required alignment.  Avoid using division in case the value is
111   negative.  Assume the alignment is a power of two.  */
112#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
113
114/* Similar, but round to the next highest integer that meets the
115   alignment.  */
116#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
117
118/* Nonzero once virtual register instantiation has been done.
119   assign_stack_local uses frame_pointer_rtx when this is nonzero.
120   calls.c:emit_library_call_value_1 uses it to set up
121   post-instantiation libcalls.  */
122int virtuals_instantiated;
123
124/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
125static GTY(()) int funcdef_no;
126
127/* These variables hold pointers to functions to create and destroy
128   target specific, per-function data structures.  */
129struct machine_function * (*init_machine_status) (void);
130
131/* The currently compiled function.  */
132struct function *cfun = 0;
133
134/* These hashes record the prologue and epilogue insns.  */
135
136struct insn_cache_hasher : ggc_cache_hasher<rtx>
137{
138  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
139  static bool equal (rtx a, rtx b) { return a == b; }
140};
141
142static GTY((cache))
143  hash_table<insn_cache_hasher> *prologue_insn_hash;
144static GTY((cache))
145  hash_table<insn_cache_hasher> *epilogue_insn_hash;
146
147
148hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
149vec<tree, va_gc> *types_used_by_cur_var_decl;
150
151/* Forward declarations.  */
152
153static struct temp_slot *find_temp_slot_from_address (rtx);
154static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
155static void pad_below (struct args_size *, machine_mode, tree);
156static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
157static int all_blocks (tree, tree *);
158static tree *get_block_vector (tree, int *);
159extern tree debug_find_var_in_block_tree (tree, tree);
160/* We always define `record_insns' even if it's not used so that we
161   can always export `prologue_epilogue_contains'.  */
162static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
163     ATTRIBUTE_UNUSED;
164static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
165static void prepare_function_start (void);
166static void do_clobber_return_reg (rtx, void *);
167static void do_use_return_reg (rtx, void *);
168
169/* Stack of nested functions.  */
170/* Keep track of the cfun stack.  */
171
172typedef struct function *function_p;
173
174static vec<function_p> function_context_stack;
175
176/* Save the current context for compilation of a nested function.
177   This is called from language-specific code.  */
178
179void
180push_function_context (void)
181{
182  if (cfun == 0)
183    allocate_struct_function (NULL, false);
184
185  function_context_stack.safe_push (cfun);
186  set_cfun (NULL);
187}
188
189/* Restore the last saved context, at the end of a nested function.
190   This function is called from language-specific code.  */
191
192void
193pop_function_context (void)
194{
195  struct function *p = function_context_stack.pop ();
196  set_cfun (p);
197  current_function_decl = p->decl;
198
199  /* Reset variables that have known state during rtx generation.  */
200  virtuals_instantiated = 0;
201  generating_concat_p = 1;
202}
203
204/* Clear out all parts of the state in F that can safely be discarded
205   after the function has been parsed, but not compiled, to let
206   garbage collection reclaim the memory.  */
207
208void
209free_after_parsing (struct function *f)
210{
211  f->language = 0;
212}
213
214/* Clear out all parts of the state in F that can safely be discarded
215   after the function has been compiled, to let garbage collection
216   reclaim the memory.  */
217
218void
219free_after_compilation (struct function *f)
220{
221  prologue_insn_hash = NULL;
222  epilogue_insn_hash = NULL;
223
224  free (crtl->emit.regno_pointer_align);
225
226  memset (crtl, 0, sizeof (struct rtl_data));
227  f->eh = NULL;
228  f->machine = NULL;
229  f->cfg = NULL;
230
231  regno_reg_rtx = NULL;
232}
233
234/* Return size needed for stack frame based on slots so far allocated.
235   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
236   the caller may have to do that.  */
237
238HOST_WIDE_INT
239get_frame_size (void)
240{
241  if (FRAME_GROWS_DOWNWARD)
242    return -frame_offset;
243  else
244    return frame_offset;
245}
246
247/* Issue an error message and return TRUE if frame OFFSET overflows in
248   the signed target pointer arithmetics for function FUNC.  Otherwise
249   return FALSE.  */
250
251bool
252frame_offset_overflow (HOST_WIDE_INT offset, tree func)
253{
254  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
255
256  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
257	       /* Leave room for the fixed part of the frame.  */
258	       - 64 * UNITS_PER_WORD)
259    {
260      error_at (DECL_SOURCE_LOCATION (func),
261		"total size of local objects too large");
262      return TRUE;
263    }
264
265  return FALSE;
266}
267
268/* Return stack slot alignment in bits for TYPE and MODE.  */
269
270static unsigned int
271get_stack_local_alignment (tree type, machine_mode mode)
272{
273  unsigned int alignment;
274
275  if (mode == BLKmode)
276    alignment = BIGGEST_ALIGNMENT;
277  else
278    alignment = GET_MODE_ALIGNMENT (mode);
279
280  /* Allow the frond-end to (possibly) increase the alignment of this
281     stack slot.  */
282  if (! type)
283    type = lang_hooks.types.type_for_mode (mode, 0);
284
285  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
286}
287
288/* Determine whether it is possible to fit a stack slot of size SIZE and
289   alignment ALIGNMENT into an area in the stack frame that starts at
290   frame offset START and has a length of LENGTH.  If so, store the frame
291   offset to be used for the stack slot in *POFFSET and return true;
292   return false otherwise.  This function will extend the frame size when
293   given a start/length pair that lies at the end of the frame.  */
294
295static bool
296try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
297		     HOST_WIDE_INT size, unsigned int alignment,
298		     HOST_WIDE_INT *poffset)
299{
300  HOST_WIDE_INT this_frame_offset;
301  int frame_off, frame_alignment, frame_phase;
302
303  /* Calculate how many bytes the start of local variables is off from
304     stack alignment.  */
305  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
306  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
307  frame_phase = frame_off ? frame_alignment - frame_off : 0;
308
309  /* Round the frame offset to the specified alignment.  */
310
311  /*  We must be careful here, since FRAME_OFFSET might be negative and
312      division with a negative dividend isn't as well defined as we might
313      like.  So we instead assume that ALIGNMENT is a power of two and
314      use logical operations which are unambiguous.  */
315  if (FRAME_GROWS_DOWNWARD)
316    this_frame_offset
317      = (FLOOR_ROUND (start + length - size - frame_phase,
318		      (unsigned HOST_WIDE_INT) alignment)
319	 + frame_phase);
320  else
321    this_frame_offset
322      = (CEIL_ROUND (start - frame_phase,
323		     (unsigned HOST_WIDE_INT) alignment)
324	 + frame_phase);
325
326  /* See if it fits.  If this space is at the edge of the frame,
327     consider extending the frame to make it fit.  Our caller relies on
328     this when allocating a new slot.  */
329  if (frame_offset == start && this_frame_offset < frame_offset)
330    frame_offset = this_frame_offset;
331  else if (this_frame_offset < start)
332    return false;
333  else if (start + length == frame_offset
334	   && this_frame_offset + size > start + length)
335    frame_offset = this_frame_offset + size;
336  else if (this_frame_offset + size > start + length)
337    return false;
338
339  *poffset = this_frame_offset;
340  return true;
341}
342
343/* Create a new frame_space structure describing free space in the stack
344   frame beginning at START and ending at END, and chain it into the
345   function's frame_space_list.  */
346
347static void
348add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
349{
350  struct frame_space *space = ggc_alloc<frame_space> ();
351  space->next = crtl->frame_space_list;
352  crtl->frame_space_list = space;
353  space->start = start;
354  space->length = end - start;
355}
356
357/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
358   with machine mode MODE.
359
360   ALIGN controls the amount of alignment for the address of the slot:
361   0 means according to MODE,
362   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
363   -2 means use BITS_PER_UNIT,
364   positive specifies alignment boundary in bits.
365
366   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
367   alignment and ASLK_RECORD_PAD bit set if we should remember
368   extra space we allocated for alignment purposes.  When we are
369   called from assign_stack_temp_for_type, it is not set so we don't
370   track the same stack slot in two independent lists.
371
372   We do not round to stack_boundary here.  */
373
374rtx
375assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
376		      int align, int kind)
377{
378  rtx x, addr;
379  int bigend_correction = 0;
380  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
381  unsigned int alignment, alignment_in_bits;
382
383  if (align == 0)
384    {
385      alignment = get_stack_local_alignment (NULL, mode);
386      alignment /= BITS_PER_UNIT;
387    }
388  else if (align == -1)
389    {
390      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
391      size = CEIL_ROUND (size, alignment);
392    }
393  else if (align == -2)
394    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
395  else
396    alignment = align / BITS_PER_UNIT;
397
398  alignment_in_bits = alignment * BITS_PER_UNIT;
399
400  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
401  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
402    {
403      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
404      alignment = alignment_in_bits / BITS_PER_UNIT;
405    }
406
407  if (SUPPORTS_STACK_ALIGNMENT)
408    {
409      if (crtl->stack_alignment_estimated < alignment_in_bits)
410	{
411          if (!crtl->stack_realign_processed)
412	    crtl->stack_alignment_estimated = alignment_in_bits;
413          else
414	    {
415	      /* If stack is realigned and stack alignment value
416		 hasn't been finalized, it is OK not to increase
417		 stack_alignment_estimated.  The bigger alignment
418		 requirement is recorded in stack_alignment_needed
419		 below.  */
420	      gcc_assert (!crtl->stack_realign_finalized);
421	      if (!crtl->stack_realign_needed)
422		{
423		  /* It is OK to reduce the alignment as long as the
424		     requested size is 0 or the estimated stack
425		     alignment >= mode alignment.  */
426		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
427		              || size == 0
428			      || (crtl->stack_alignment_estimated
429				  >= GET_MODE_ALIGNMENT (mode)));
430		  alignment_in_bits = crtl->stack_alignment_estimated;
431		  alignment = alignment_in_bits / BITS_PER_UNIT;
432		}
433	    }
434	}
435    }
436
437  if (crtl->stack_alignment_needed < alignment_in_bits)
438    crtl->stack_alignment_needed = alignment_in_bits;
439  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
440    crtl->max_used_stack_slot_alignment = alignment_in_bits;
441
442  if (mode != BLKmode || size != 0)
443    {
444      if (kind & ASLK_RECORD_PAD)
445	{
446	  struct frame_space **psp;
447
448	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
449	    {
450	      struct frame_space *space = *psp;
451	      if (!try_fit_stack_local (space->start, space->length, size,
452					alignment, &slot_offset))
453		continue;
454	      *psp = space->next;
455	      if (slot_offset > space->start)
456		add_frame_space (space->start, slot_offset);
457	      if (slot_offset + size < space->start + space->length)
458		add_frame_space (slot_offset + size,
459				 space->start + space->length);
460	      goto found_space;
461	    }
462	}
463    }
464  else if (!STACK_ALIGNMENT_NEEDED)
465    {
466      slot_offset = frame_offset;
467      goto found_space;
468    }
469
470  old_frame_offset = frame_offset;
471
472  if (FRAME_GROWS_DOWNWARD)
473    {
474      frame_offset -= size;
475      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
476
477      if (kind & ASLK_RECORD_PAD)
478	{
479	  if (slot_offset > frame_offset)
480	    add_frame_space (frame_offset, slot_offset);
481	  if (slot_offset + size < old_frame_offset)
482	    add_frame_space (slot_offset + size, old_frame_offset);
483	}
484    }
485  else
486    {
487      frame_offset += size;
488      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
489
490      if (kind & ASLK_RECORD_PAD)
491	{
492	  if (slot_offset > old_frame_offset)
493	    add_frame_space (old_frame_offset, slot_offset);
494	  if (slot_offset + size < frame_offset)
495	    add_frame_space (slot_offset + size, frame_offset);
496	}
497    }
498
499 found_space:
500  /* On a big-endian machine, if we are allocating more space than we will use,
501     use the least significant bytes of those that are allocated.  */
502  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
503    bigend_correction = size - GET_MODE_SIZE (mode);
504
505  /* If we have already instantiated virtual registers, return the actual
506     address relative to the frame pointer.  */
507  if (virtuals_instantiated)
508    addr = plus_constant (Pmode, frame_pointer_rtx,
509			  trunc_int_for_mode
510			  (slot_offset + bigend_correction
511			   + STARTING_FRAME_OFFSET, Pmode));
512  else
513    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
514			  trunc_int_for_mode
515			  (slot_offset + bigend_correction,
516			   Pmode));
517
518  x = gen_rtx_MEM (mode, addr);
519  set_mem_align (x, alignment_in_bits);
520  MEM_NOTRAP_P (x) = 1;
521
522  stack_slot_list
523    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
524
525  if (frame_offset_overflow (frame_offset, current_function_decl))
526    frame_offset = 0;
527
528  return x;
529}
530
531/* Wrap up assign_stack_local_1 with last parameter as false.  */
532
533rtx
534assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
535{
536  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
537}
538
539/* In order to evaluate some expressions, such as function calls returning
540   structures in memory, we need to temporarily allocate stack locations.
541   We record each allocated temporary in the following structure.
542
543   Associated with each temporary slot is a nesting level.  When we pop up
544   one level, all temporaries associated with the previous level are freed.
545   Normally, all temporaries are freed after the execution of the statement
546   in which they were created.  However, if we are inside a ({...}) grouping,
547   the result may be in a temporary and hence must be preserved.  If the
548   result could be in a temporary, we preserve it if we can determine which
549   one it is in.  If we cannot determine which temporary may contain the
550   result, all temporaries are preserved.  A temporary is preserved by
551   pretending it was allocated at the previous nesting level.  */
552
553struct GTY(()) temp_slot {
554  /* Points to next temporary slot.  */
555  struct temp_slot *next;
556  /* Points to previous temporary slot.  */
557  struct temp_slot *prev;
558  /* The rtx to used to reference the slot.  */
559  rtx slot;
560  /* The size, in units, of the slot.  */
561  HOST_WIDE_INT size;
562  /* The type of the object in the slot, or zero if it doesn't correspond
563     to a type.  We use this to determine whether a slot can be reused.
564     It can be reused if objects of the type of the new slot will always
565     conflict with objects of the type of the old slot.  */
566  tree type;
567  /* The alignment (in bits) of the slot.  */
568  unsigned int align;
569  /* Nonzero if this temporary is currently in use.  */
570  char in_use;
571  /* Nesting level at which this slot is being used.  */
572  int level;
573  /* The offset of the slot from the frame_pointer, including extra space
574     for alignment.  This info is for combine_temp_slots.  */
575  HOST_WIDE_INT base_offset;
576  /* The size of the slot, including extra space for alignment.  This
577     info is for combine_temp_slots.  */
578  HOST_WIDE_INT full_size;
579};
580
581/* Entry for the below hash table.  */
582struct GTY((for_user)) temp_slot_address_entry {
583  hashval_t hash;
584  rtx address;
585  struct temp_slot *temp_slot;
586};
587
588struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
589{
590  static hashval_t hash (temp_slot_address_entry *);
591  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
592};
593
594/* A table of addresses that represent a stack slot.  The table is a mapping
595   from address RTXen to a temp slot.  */
596static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
597static size_t n_temp_slots_in_use;
598
599/* Removes temporary slot TEMP from LIST.  */
600
601static void
602cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
603{
604  if (temp->next)
605    temp->next->prev = temp->prev;
606  if (temp->prev)
607    temp->prev->next = temp->next;
608  else
609    *list = temp->next;
610
611  temp->prev = temp->next = NULL;
612}
613
614/* Inserts temporary slot TEMP to LIST.  */
615
616static void
617insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
618{
619  temp->next = *list;
620  if (*list)
621    (*list)->prev = temp;
622  temp->prev = NULL;
623  *list = temp;
624}
625
626/* Returns the list of used temp slots at LEVEL.  */
627
628static struct temp_slot **
629temp_slots_at_level (int level)
630{
631  if (level >= (int) vec_safe_length (used_temp_slots))
632    vec_safe_grow_cleared (used_temp_slots, level + 1);
633
634  return &(*used_temp_slots)[level];
635}
636
637/* Returns the maximal temporary slot level.  */
638
639static int
640max_slot_level (void)
641{
642  if (!used_temp_slots)
643    return -1;
644
645  return used_temp_slots->length () - 1;
646}
647
648/* Moves temporary slot TEMP to LEVEL.  */
649
650static void
651move_slot_to_level (struct temp_slot *temp, int level)
652{
653  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
654  insert_slot_to_list (temp, temp_slots_at_level (level));
655  temp->level = level;
656}
657
658/* Make temporary slot TEMP available.  */
659
660static void
661make_slot_available (struct temp_slot *temp)
662{
663  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
664  insert_slot_to_list (temp, &avail_temp_slots);
665  temp->in_use = 0;
666  temp->level = -1;
667  n_temp_slots_in_use--;
668}
669
670/* Compute the hash value for an address -> temp slot mapping.
671   The value is cached on the mapping entry.  */
672static hashval_t
673temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
674{
675  int do_not_record = 0;
676  return hash_rtx (t->address, GET_MODE (t->address),
677		   &do_not_record, NULL, false);
678}
679
680/* Return the hash value for an address -> temp slot mapping.  */
681hashval_t
682temp_address_hasher::hash (temp_slot_address_entry *t)
683{
684  return t->hash;
685}
686
687/* Compare two address -> temp slot mapping entries.  */
688bool
689temp_address_hasher::equal (temp_slot_address_entry *t1,
690			    temp_slot_address_entry *t2)
691{
692  return exp_equiv_p (t1->address, t2->address, 0, true);
693}
694
695/* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping.  */
696static void
697insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
698{
699  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
700  t->address = address;
701  t->temp_slot = temp_slot;
702  t->hash = temp_slot_address_compute_hash (t);
703  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
704}
705
706/* Remove an address -> temp slot mapping entry if the temp slot is
707   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
708int
709remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
710{
711  const struct temp_slot_address_entry *t = *slot;
712  if (! t->temp_slot->in_use)
713    temp_slot_address_table->clear_slot (slot);
714  return 1;
715}
716
717/* Remove all mappings of addresses to unused temp slots.  */
718static void
719remove_unused_temp_slot_addresses (void)
720{
721  /* Use quicker clearing if there aren't any active temp slots.  */
722  if (n_temp_slots_in_use)
723    temp_slot_address_table->traverse
724      <void *, remove_unused_temp_slot_addresses_1> (NULL);
725  else
726    temp_slot_address_table->empty ();
727}
728
729/* Find the temp slot corresponding to the object at address X.  */
730
731static struct temp_slot *
732find_temp_slot_from_address (rtx x)
733{
734  struct temp_slot *p;
735  struct temp_slot_address_entry tmp, *t;
736
737  /* First try the easy way:
738     See if X exists in the address -> temp slot mapping.  */
739  tmp.address = x;
740  tmp.temp_slot = NULL;
741  tmp.hash = temp_slot_address_compute_hash (&tmp);
742  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
743  if (t)
744    return t->temp_slot;
745
746  /* If we have a sum involving a register, see if it points to a temp
747     slot.  */
748  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
749      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
750    return p;
751  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
752	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
753    return p;
754
755  /* Last resort: Address is a virtual stack var address.  */
756  if (GET_CODE (x) == PLUS
757      && XEXP (x, 0) == virtual_stack_vars_rtx
758      && CONST_INT_P (XEXP (x, 1)))
759    {
760      int i;
761      for (i = max_slot_level (); i >= 0; i--)
762	for (p = *temp_slots_at_level (i); p; p = p->next)
763	  {
764	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
765		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
766	      return p;
767	  }
768    }
769
770  return NULL;
771}
772
773/* Allocate a temporary stack slot and record it for possible later
774   reuse.
775
776   MODE is the machine mode to be given to the returned rtx.
777
778   SIZE is the size in units of the space required.  We do no rounding here
779   since assign_stack_local will do any required rounding.
780
781   TYPE is the type that will be used for the stack slot.  */
782
783rtx
784assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
785			    tree type)
786{
787  unsigned int align;
788  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
789  rtx slot;
790
791  /* If SIZE is -1 it means that somebody tried to allocate a temporary
792     of a variable size.  */
793  gcc_assert (size != -1);
794
795  align = get_stack_local_alignment (type, mode);
796
797  /* Try to find an available, already-allocated temporary of the proper
798     mode which meets the size and alignment requirements.  Choose the
799     smallest one with the closest alignment.
800
801     If assign_stack_temp is called outside of the tree->rtl expansion,
802     we cannot reuse the stack slots (that may still refer to
803     VIRTUAL_STACK_VARS_REGNUM).  */
804  if (!virtuals_instantiated)
805    {
806      for (p = avail_temp_slots; p; p = p->next)
807	{
808	  if (p->align >= align && p->size >= size
809	      && GET_MODE (p->slot) == mode
810	      && objects_must_conflict_p (p->type, type)
811	      && (best_p == 0 || best_p->size > p->size
812		  || (best_p->size == p->size && best_p->align > p->align)))
813	    {
814	      if (p->align == align && p->size == size)
815		{
816		  selected = p;
817		  cut_slot_from_list (selected, &avail_temp_slots);
818		  best_p = 0;
819		  break;
820		}
821	      best_p = p;
822	    }
823	}
824    }
825
826  /* Make our best, if any, the one to use.  */
827  if (best_p)
828    {
829      selected = best_p;
830      cut_slot_from_list (selected, &avail_temp_slots);
831
832      /* If there are enough aligned bytes left over, make them into a new
833	 temp_slot so that the extra bytes don't get wasted.  Do this only
834	 for BLKmode slots, so that we can be sure of the alignment.  */
835      if (GET_MODE (best_p->slot) == BLKmode)
836	{
837	  int alignment = best_p->align / BITS_PER_UNIT;
838	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
839
840	  if (best_p->size - rounded_size >= alignment)
841	    {
842	      p = ggc_alloc<temp_slot> ();
843	      p->in_use = 0;
844	      p->size = best_p->size - rounded_size;
845	      p->base_offset = best_p->base_offset + rounded_size;
846	      p->full_size = best_p->full_size - rounded_size;
847	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
848	      p->align = best_p->align;
849	      p->type = best_p->type;
850	      insert_slot_to_list (p, &avail_temp_slots);
851
852	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
853						   stack_slot_list);
854
855	      best_p->size = rounded_size;
856	      best_p->full_size = rounded_size;
857	    }
858	}
859    }
860
861  /* If we still didn't find one, make a new temporary.  */
862  if (selected == 0)
863    {
864      HOST_WIDE_INT frame_offset_old = frame_offset;
865
866      p = ggc_alloc<temp_slot> ();
867
868      /* We are passing an explicit alignment request to assign_stack_local.
869	 One side effect of that is assign_stack_local will not round SIZE
870	 to ensure the frame offset remains suitably aligned.
871
872	 So for requests which depended on the rounding of SIZE, we go ahead
873	 and round it now.  We also make sure ALIGNMENT is at least
874	 BIGGEST_ALIGNMENT.  */
875      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
876      p->slot = assign_stack_local_1 (mode,
877				      (mode == BLKmode
878				       ? CEIL_ROUND (size,
879						     (int) align
880						     / BITS_PER_UNIT)
881				       : size),
882				      align, 0);
883
884      p->align = align;
885
886      /* The following slot size computation is necessary because we don't
887	 know the actual size of the temporary slot until assign_stack_local
888	 has performed all the frame alignment and size rounding for the
889	 requested temporary.  Note that extra space added for alignment
890	 can be either above or below this stack slot depending on which
891	 way the frame grows.  We include the extra space if and only if it
892	 is above this slot.  */
893      if (FRAME_GROWS_DOWNWARD)
894	p->size = frame_offset_old - frame_offset;
895      else
896	p->size = size;
897
898      /* Now define the fields used by combine_temp_slots.  */
899      if (FRAME_GROWS_DOWNWARD)
900	{
901	  p->base_offset = frame_offset;
902	  p->full_size = frame_offset_old - frame_offset;
903	}
904      else
905	{
906	  p->base_offset = frame_offset_old;
907	  p->full_size = frame_offset - frame_offset_old;
908	}
909
910      selected = p;
911    }
912
913  p = selected;
914  p->in_use = 1;
915  p->type = type;
916  p->level = temp_slot_level;
917  n_temp_slots_in_use++;
918
919  pp = temp_slots_at_level (p->level);
920  insert_slot_to_list (p, pp);
921  insert_temp_slot_address (XEXP (p->slot, 0), p);
922
923  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
924  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
925  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
926
927  /* If we know the alias set for the memory that will be used, use
928     it.  If there's no TYPE, then we don't know anything about the
929     alias set for the memory.  */
930  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
931  set_mem_align (slot, align);
932
933  /* If a type is specified, set the relevant flags.  */
934  if (type != 0)
935    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
936  MEM_NOTRAP_P (slot) = 1;
937
938  return slot;
939}
940
941/* Allocate a temporary stack slot and record it for possible later
942   reuse.  First two arguments are same as in preceding function.  */
943
944rtx
945assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
946{
947  return assign_stack_temp_for_type (mode, size, NULL_TREE);
948}
949
950/* Assign a temporary.
951   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
952   and so that should be used in error messages.  In either case, we
953   allocate of the given type.
954   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
955   it is 0 if a register is OK.
956   DONT_PROMOTE is 1 if we should not promote values in register
957   to wider modes.  */
958
959rtx
960assign_temp (tree type_or_decl, int memory_required,
961	     int dont_promote ATTRIBUTE_UNUSED)
962{
963  tree type, decl;
964  machine_mode mode;
965#ifdef PROMOTE_MODE
966  int unsignedp;
967#endif
968
969  if (DECL_P (type_or_decl))
970    decl = type_or_decl, type = TREE_TYPE (decl);
971  else
972    decl = NULL, type = type_or_decl;
973
974  mode = TYPE_MODE (type);
975#ifdef PROMOTE_MODE
976  unsignedp = TYPE_UNSIGNED (type);
977#endif
978
979  if (mode == BLKmode || memory_required)
980    {
981      HOST_WIDE_INT size = int_size_in_bytes (type);
982      rtx tmp;
983
984      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
985	 problems with allocating the stack space.  */
986      if (size == 0)
987	size = 1;
988
989      /* Unfortunately, we don't yet know how to allocate variable-sized
990	 temporaries.  However, sometimes we can find a fixed upper limit on
991	 the size, so try that instead.  */
992      else if (size == -1)
993	size = max_int_size_in_bytes (type);
994
995      /* The size of the temporary may be too large to fit into an integer.  */
996      /* ??? Not sure this should happen except for user silliness, so limit
997	 this to things that aren't compiler-generated temporaries.  The
998	 rest of the time we'll die in assign_stack_temp_for_type.  */
999      if (decl && size == -1
1000	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1001	{
1002	  error ("size of variable %q+D is too large", decl);
1003	  size = 1;
1004	}
1005
1006      tmp = assign_stack_temp_for_type (mode, size, type);
1007      return tmp;
1008    }
1009
1010#ifdef PROMOTE_MODE
1011  if (! dont_promote)
1012    mode = promote_mode (type, mode, &unsignedp);
1013#endif
1014
1015  return gen_reg_rtx (mode);
1016}
1017
1018/* Combine temporary stack slots which are adjacent on the stack.
1019
1020   This allows for better use of already allocated stack space.  This is only
1021   done for BLKmode slots because we can be sure that we won't have alignment
1022   problems in this case.  */
1023
1024static void
1025combine_temp_slots (void)
1026{
1027  struct temp_slot *p, *q, *next, *next_q;
1028  int num_slots;
1029
1030  /* We can't combine slots, because the information about which slot
1031     is in which alias set will be lost.  */
1032  if (flag_strict_aliasing)
1033    return;
1034
1035  /* If there are a lot of temp slots, don't do anything unless
1036     high levels of optimization.  */
1037  if (! flag_expensive_optimizations)
1038    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1039      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1040	return;
1041
1042  for (p = avail_temp_slots; p; p = next)
1043    {
1044      int delete_p = 0;
1045
1046      next = p->next;
1047
1048      if (GET_MODE (p->slot) != BLKmode)
1049	continue;
1050
1051      for (q = p->next; q; q = next_q)
1052	{
1053       	  int delete_q = 0;
1054
1055	  next_q = q->next;
1056
1057	  if (GET_MODE (q->slot) != BLKmode)
1058	    continue;
1059
1060	  if (p->base_offset + p->full_size == q->base_offset)
1061	    {
1062	      /* Q comes after P; combine Q into P.  */
1063	      p->size += q->size;
1064	      p->full_size += q->full_size;
1065	      delete_q = 1;
1066	    }
1067	  else if (q->base_offset + q->full_size == p->base_offset)
1068	    {
1069	      /* P comes after Q; combine P into Q.  */
1070	      q->size += p->size;
1071	      q->full_size += p->full_size;
1072	      delete_p = 1;
1073	      break;
1074	    }
1075	  if (delete_q)
1076	    cut_slot_from_list (q, &avail_temp_slots);
1077	}
1078
1079      /* Either delete P or advance past it.  */
1080      if (delete_p)
1081	cut_slot_from_list (p, &avail_temp_slots);
1082    }
1083}
1084
1085/* Indicate that NEW_RTX is an alternate way of referring to the temp
1086   slot that previously was known by OLD_RTX.  */
1087
1088void
1089update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1090{
1091  struct temp_slot *p;
1092
1093  if (rtx_equal_p (old_rtx, new_rtx))
1094    return;
1095
1096  p = find_temp_slot_from_address (old_rtx);
1097
1098  /* If we didn't find one, see if both OLD_RTX is a PLUS.  If so, and
1099     NEW_RTX is a register, see if one operand of the PLUS is a
1100     temporary location.  If so, NEW_RTX points into it.  Otherwise,
1101     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1102     in common between them.  If so, try a recursive call on those
1103     values.  */
1104  if (p == 0)
1105    {
1106      if (GET_CODE (old_rtx) != PLUS)
1107	return;
1108
1109      if (REG_P (new_rtx))
1110	{
1111	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1112	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1113	  return;
1114	}
1115      else if (GET_CODE (new_rtx) != PLUS)
1116	return;
1117
1118      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1119	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1120      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1121	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1122      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1123	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1124      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1125	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1126
1127      return;
1128    }
1129
1130  /* Otherwise add an alias for the temp's address.  */
1131  insert_temp_slot_address (new_rtx, p);
1132}
1133
1134/* If X could be a reference to a temporary slot, mark that slot as
1135   belonging to the to one level higher than the current level.  If X
1136   matched one of our slots, just mark that one.  Otherwise, we can't
1137   easily predict which it is, so upgrade all of them.
1138
1139   This is called when an ({...}) construct occurs and a statement
1140   returns a value in memory.  */
1141
1142void
1143preserve_temp_slots (rtx x)
1144{
1145  struct temp_slot *p = 0, *next;
1146
1147  if (x == 0)
1148    return;
1149
1150  /* If X is a register that is being used as a pointer, see if we have
1151     a temporary slot we know it points to.  */
1152  if (REG_P (x) && REG_POINTER (x))
1153    p = find_temp_slot_from_address (x);
1154
1155  /* If X is not in memory or is at a constant address, it cannot be in
1156     a temporary slot.  */
1157  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1158    return;
1159
1160  /* First see if we can find a match.  */
1161  if (p == 0)
1162    p = find_temp_slot_from_address (XEXP (x, 0));
1163
1164  if (p != 0)
1165    {
1166      if (p->level == temp_slot_level)
1167	move_slot_to_level (p, temp_slot_level - 1);
1168      return;
1169    }
1170
1171  /* Otherwise, preserve all non-kept slots at this level.  */
1172  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1173    {
1174      next = p->next;
1175      move_slot_to_level (p, temp_slot_level - 1);
1176    }
1177}
1178
1179/* Free all temporaries used so far.  This is normally called at the
1180   end of generating code for a statement.  */
1181
1182void
1183free_temp_slots (void)
1184{
1185  struct temp_slot *p, *next;
1186  bool some_available = false;
1187
1188  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1189    {
1190      next = p->next;
1191      make_slot_available (p);
1192      some_available = true;
1193    }
1194
1195  if (some_available)
1196    {
1197      remove_unused_temp_slot_addresses ();
1198      combine_temp_slots ();
1199    }
1200}
1201
1202/* Push deeper into the nesting level for stack temporaries.  */
1203
1204void
1205push_temp_slots (void)
1206{
1207  temp_slot_level++;
1208}
1209
1210/* Pop a temporary nesting level.  All slots in use in the current level
1211   are freed.  */
1212
1213void
1214pop_temp_slots (void)
1215{
1216  free_temp_slots ();
1217  temp_slot_level--;
1218}
1219
1220/* Initialize temporary slots.  */
1221
1222void
1223init_temp_slots (void)
1224{
1225  /* We have not allocated any temporaries yet.  */
1226  avail_temp_slots = 0;
1227  vec_alloc (used_temp_slots, 0);
1228  temp_slot_level = 0;
1229  n_temp_slots_in_use = 0;
1230
1231  /* Set up the table to map addresses to temp slots.  */
1232  if (! temp_slot_address_table)
1233    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1234  else
1235    temp_slot_address_table->empty ();
1236}
1237
1238/* Functions and data structures to keep track of the values hard regs
1239   had at the start of the function.  */
1240
1241/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1242   and has_hard_reg_initial_val..  */
1243typedef struct GTY(()) initial_value_pair {
1244  rtx hard_reg;
1245  rtx pseudo;
1246} initial_value_pair;
1247/* ???  This could be a VEC but there is currently no way to define an
1248   opaque VEC type.  This could be worked around by defining struct
1249   initial_value_pair in function.h.  */
1250typedef struct GTY(()) initial_value_struct {
1251  int num_entries;
1252  int max_entries;
1253  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1254} initial_value_struct;
1255
1256/* If a pseudo represents an initial hard reg (or expression), return
1257   it, else return NULL_RTX.  */
1258
1259rtx
1260get_hard_reg_initial_reg (rtx reg)
1261{
1262  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1263  int i;
1264
1265  if (ivs == 0)
1266    return NULL_RTX;
1267
1268  for (i = 0; i < ivs->num_entries; i++)
1269    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1270      return ivs->entries[i].hard_reg;
1271
1272  return NULL_RTX;
1273}
1274
1275/* Make sure that there's a pseudo register of mode MODE that stores the
1276   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
1277
1278rtx
1279get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1280{
1281  struct initial_value_struct *ivs;
1282  rtx rv;
1283
1284  rv = has_hard_reg_initial_val (mode, regno);
1285  if (rv)
1286    return rv;
1287
1288  ivs = crtl->hard_reg_initial_vals;
1289  if (ivs == 0)
1290    {
1291      ivs = ggc_alloc<initial_value_struct> ();
1292      ivs->num_entries = 0;
1293      ivs->max_entries = 5;
1294      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1295      crtl->hard_reg_initial_vals = ivs;
1296    }
1297
1298  if (ivs->num_entries >= ivs->max_entries)
1299    {
1300      ivs->max_entries += 5;
1301      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1302				    ivs->max_entries);
1303    }
1304
1305  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1306  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1307
1308  return ivs->entries[ivs->num_entries++].pseudo;
1309}
1310
1311/* See if get_hard_reg_initial_val has been used to create a pseudo
1312   for the initial value of hard register REGNO in mode MODE.  Return
1313   the associated pseudo if so, otherwise return NULL.  */
1314
1315rtx
1316has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1317{
1318  struct initial_value_struct *ivs;
1319  int i;
1320
1321  ivs = crtl->hard_reg_initial_vals;
1322  if (ivs != 0)
1323    for (i = 0; i < ivs->num_entries; i++)
1324      if (GET_MODE (ivs->entries[i].hard_reg) == mode
1325	  && REGNO (ivs->entries[i].hard_reg) == regno)
1326	return ivs->entries[i].pseudo;
1327
1328  return NULL_RTX;
1329}
1330
1331unsigned int
1332emit_initial_value_sets (void)
1333{
1334  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1335  int i;
1336  rtx_insn *seq;
1337
1338  if (ivs == 0)
1339    return 0;
1340
1341  start_sequence ();
1342  for (i = 0; i < ivs->num_entries; i++)
1343    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1344  seq = get_insns ();
1345  end_sequence ();
1346
1347  emit_insn_at_entry (seq);
1348  return 0;
1349}
1350
1351/* Return the hardreg-pseudoreg initial values pair entry I and
1352   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
1353bool
1354initial_value_entry (int i, rtx *hreg, rtx *preg)
1355{
1356  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1357  if (!ivs || i >= ivs->num_entries)
1358    return false;
1359
1360  *hreg = ivs->entries[i].hard_reg;
1361  *preg = ivs->entries[i].pseudo;
1362  return true;
1363}
1364
1365/* These routines are responsible for converting virtual register references
1366   to the actual hard register references once RTL generation is complete.
1367
1368   The following four variables are used for communication between the
1369   routines.  They contain the offsets of the virtual registers from their
1370   respective hard registers.  */
1371
1372static int in_arg_offset;
1373static int var_offset;
1374static int dynamic_offset;
1375static int out_arg_offset;
1376static int cfa_offset;
1377
1378/* In most machines, the stack pointer register is equivalent to the bottom
1379   of the stack.  */
1380
1381#ifndef STACK_POINTER_OFFSET
1382#define STACK_POINTER_OFFSET	0
1383#endif
1384
1385#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1386#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1387#endif
1388
1389/* If not defined, pick an appropriate default for the offset of dynamically
1390   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1391   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1392
1393#ifndef STACK_DYNAMIC_OFFSET
1394
1395/* The bottom of the stack points to the actual arguments.  If
1396   REG_PARM_STACK_SPACE is defined, this includes the space for the register
1397   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
1398   stack space for register parameters is not pushed by the caller, but
1399   rather part of the fixed stack areas and hence not included in
1400   `crtl->outgoing_args_size'.  Nevertheless, we must allow
1401   for it when allocating stack dynamic objects.  */
1402
1403#ifdef INCOMING_REG_PARM_STACK_SPACE
1404#define STACK_DYNAMIC_OFFSET(FNDECL)	\
1405((ACCUMULATE_OUTGOING_ARGS						      \
1406  ? (crtl->outgoing_args_size				      \
1407     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1408					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1409  : 0) + (STACK_POINTER_OFFSET))
1410#else
1411#define STACK_DYNAMIC_OFFSET(FNDECL)	\
1412((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)	      \
1413 + (STACK_POINTER_OFFSET))
1414#endif
1415#endif
1416
1417
1418/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1419   is a virtual register, return the equivalent hard register and set the
1420   offset indirectly through the pointer.  Otherwise, return 0.  */
1421
1422static rtx
1423instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1424{
1425  rtx new_rtx;
1426  HOST_WIDE_INT offset;
1427
1428  if (x == virtual_incoming_args_rtx)
1429    {
1430      if (stack_realign_drap)
1431        {
1432	  /* Replace virtual_incoming_args_rtx with internal arg
1433	     pointer if DRAP is used to realign stack.  */
1434          new_rtx = crtl->args.internal_arg_pointer;
1435          offset = 0;
1436        }
1437      else
1438        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1439    }
1440  else if (x == virtual_stack_vars_rtx)
1441    new_rtx = frame_pointer_rtx, offset = var_offset;
1442  else if (x == virtual_stack_dynamic_rtx)
1443    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1444  else if (x == virtual_outgoing_args_rtx)
1445    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1446  else if (x == virtual_cfa_rtx)
1447    {
1448#ifdef FRAME_POINTER_CFA_OFFSET
1449      new_rtx = frame_pointer_rtx;
1450#else
1451      new_rtx = arg_pointer_rtx;
1452#endif
1453      offset = cfa_offset;
1454    }
1455  else if (x == virtual_preferred_stack_boundary_rtx)
1456    {
1457      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1458      offset = 0;
1459    }
1460  else
1461    return NULL_RTX;
1462
1463  *poffset = offset;
1464  return new_rtx;
1465}
1466
1467/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1468   registers present inside of *LOC.  The expression is simplified,
1469   as much as possible, but is not to be considered "valid" in any sense
1470   implied by the target.  Return true if any change is made.  */
1471
1472static bool
1473instantiate_virtual_regs_in_rtx (rtx *loc)
1474{
1475  if (!*loc)
1476    return false;
1477  bool changed = false;
1478  subrtx_ptr_iterator::array_type array;
1479  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1480    {
1481      rtx *loc = *iter;
1482      if (rtx x = *loc)
1483	{
1484	  rtx new_rtx;
1485	  HOST_WIDE_INT offset;
1486	  switch (GET_CODE (x))
1487	    {
1488	    case REG:
1489	      new_rtx = instantiate_new_reg (x, &offset);
1490	      if (new_rtx)
1491		{
1492		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1493		  changed = true;
1494		}
1495	      iter.skip_subrtxes ();
1496	      break;
1497
1498	    case PLUS:
1499	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1500	      if (new_rtx)
1501		{
1502		  XEXP (x, 0) = new_rtx;
1503		  *loc = plus_constant (GET_MODE (x), x, offset, true);
1504		  changed = true;
1505		  iter.skip_subrtxes ();
1506		  break;
1507		}
1508
1509	      /* FIXME -- from old code */
1510	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1511		 we can commute the PLUS and SUBREG because pointers into the
1512		 frame are well-behaved.  */
1513	      break;
1514
1515	    default:
1516	      break;
1517	    }
1518	}
1519    }
1520  return changed;
1521}
1522
1523/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1524   matches the predicate for insn CODE operand OPERAND.  */
1525
1526static int
1527safe_insn_predicate (int code, int operand, rtx x)
1528{
1529  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1530}
1531
1532/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1533   registers present inside of insn.  The result will be a valid insn.  */
1534
1535static void
1536instantiate_virtual_regs_in_insn (rtx_insn *insn)
1537{
1538  HOST_WIDE_INT offset;
1539  int insn_code, i;
1540  bool any_change = false;
1541  rtx set, new_rtx, x;
1542  rtx_insn *seq;
1543
1544  /* There are some special cases to be handled first.  */
1545  set = single_set (insn);
1546  if (set)
1547    {
1548      /* We're allowed to assign to a virtual register.  This is interpreted
1549	 to mean that the underlying register gets assigned the inverse
1550	 transformation.  This is used, for example, in the handling of
1551	 non-local gotos.  */
1552      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1553      if (new_rtx)
1554	{
1555	  start_sequence ();
1556
1557	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1558	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1559				   gen_int_mode (-offset, GET_MODE (new_rtx)));
1560	  x = force_operand (x, new_rtx);
1561	  if (x != new_rtx)
1562	    emit_move_insn (new_rtx, x);
1563
1564	  seq = get_insns ();
1565	  end_sequence ();
1566
1567	  emit_insn_before (seq, insn);
1568	  delete_insn (insn);
1569	  return;
1570	}
1571
1572      /* Handle a straight copy from a virtual register by generating a
1573	 new add insn.  The difference between this and falling through
1574	 to the generic case is avoiding a new pseudo and eliminating a
1575	 move insn in the initial rtl stream.  */
1576      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1577      if (new_rtx && offset != 0
1578	  && REG_P (SET_DEST (set))
1579	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1580	{
1581	  start_sequence ();
1582
1583	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1584				   gen_int_mode (offset,
1585						 GET_MODE (SET_DEST (set))),
1586				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1587	  if (x != SET_DEST (set))
1588	    emit_move_insn (SET_DEST (set), x);
1589
1590	  seq = get_insns ();
1591	  end_sequence ();
1592
1593	  emit_insn_before (seq, insn);
1594	  delete_insn (insn);
1595	  return;
1596	}
1597
1598      extract_insn (insn);
1599      insn_code = INSN_CODE (insn);
1600
1601      /* Handle a plus involving a virtual register by determining if the
1602	 operands remain valid if they're modified in place.  */
1603      if (GET_CODE (SET_SRC (set)) == PLUS
1604	  && recog_data.n_operands >= 3
1605	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1606	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1607	  && CONST_INT_P (recog_data.operand[2])
1608	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1609	{
1610	  offset += INTVAL (recog_data.operand[2]);
1611
1612	  /* If the sum is zero, then replace with a plain move.  */
1613	  if (offset == 0
1614	      && REG_P (SET_DEST (set))
1615	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1616	    {
1617	      start_sequence ();
1618	      emit_move_insn (SET_DEST (set), new_rtx);
1619	      seq = get_insns ();
1620	      end_sequence ();
1621
1622	      emit_insn_before (seq, insn);
1623	      delete_insn (insn);
1624	      return;
1625	    }
1626
1627	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1628
1629	  /* Using validate_change and apply_change_group here leaves
1630	     recog_data in an invalid state.  Since we know exactly what
1631	     we want to check, do those two by hand.  */
1632	  if (safe_insn_predicate (insn_code, 1, new_rtx)
1633	      && safe_insn_predicate (insn_code, 2, x))
1634	    {
1635	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1636	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1637	      any_change = true;
1638
1639	      /* Fall through into the regular operand fixup loop in
1640		 order to take care of operands other than 1 and 2.  */
1641	    }
1642	}
1643    }
1644  else
1645    {
1646      extract_insn (insn);
1647      insn_code = INSN_CODE (insn);
1648    }
1649
1650  /* In the general case, we expect virtual registers to appear only in
1651     operands, and then only as either bare registers or inside memories.  */
1652  for (i = 0; i < recog_data.n_operands; ++i)
1653    {
1654      x = recog_data.operand[i];
1655      switch (GET_CODE (x))
1656	{
1657	case MEM:
1658	  {
1659	    rtx addr = XEXP (x, 0);
1660
1661	    if (!instantiate_virtual_regs_in_rtx (&addr))
1662	      continue;
1663
1664	    start_sequence ();
1665	    x = replace_equiv_address (x, addr, true);
1666	    /* It may happen that the address with the virtual reg
1667	       was valid (e.g. based on the virtual stack reg, which might
1668	       be acceptable to the predicates with all offsets), whereas
1669	       the address now isn't anymore, for instance when the address
1670	       is still offsetted, but the base reg isn't virtual-stack-reg
1671	       anymore.  Below we would do a force_reg on the whole operand,
1672	       but this insn might actually only accept memory.  Hence,
1673	       before doing that last resort, try to reload the address into
1674	       a register, so this operand stays a MEM.  */
1675	    if (!safe_insn_predicate (insn_code, i, x))
1676	      {
1677		addr = force_reg (GET_MODE (addr), addr);
1678		x = replace_equiv_address (x, addr, true);
1679	      }
1680	    seq = get_insns ();
1681	    end_sequence ();
1682	    if (seq)
1683	      emit_insn_before (seq, insn);
1684	  }
1685	  break;
1686
1687	case REG:
1688	  new_rtx = instantiate_new_reg (x, &offset);
1689	  if (new_rtx == NULL)
1690	    continue;
1691	  if (offset == 0)
1692	    x = new_rtx;
1693	  else
1694	    {
1695	      start_sequence ();
1696
1697	      /* Careful, special mode predicates may have stuff in
1698		 insn_data[insn_code].operand[i].mode that isn't useful
1699		 to us for computing a new value.  */
1700	      /* ??? Recognize address_operand and/or "p" constraints
1701		 to see if (plus new offset) is a valid before we put
1702		 this through expand_simple_binop.  */
1703	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1704				       gen_int_mode (offset, GET_MODE (x)),
1705				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1706	      seq = get_insns ();
1707	      end_sequence ();
1708	      emit_insn_before (seq, insn);
1709	    }
1710	  break;
1711
1712	case SUBREG:
1713	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1714	  if (new_rtx == NULL)
1715	    continue;
1716	  if (offset != 0)
1717	    {
1718	      start_sequence ();
1719	      new_rtx = expand_simple_binop
1720		(GET_MODE (new_rtx), PLUS, new_rtx,
1721		 gen_int_mode (offset, GET_MODE (new_rtx)),
1722		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1723	      seq = get_insns ();
1724	      end_sequence ();
1725	      emit_insn_before (seq, insn);
1726	    }
1727	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1728				   GET_MODE (new_rtx), SUBREG_BYTE (x));
1729	  gcc_assert (x);
1730	  break;
1731
1732	default:
1733	  continue;
1734	}
1735
1736      /* At this point, X contains the new value for the operand.
1737	 Validate the new value vs the insn predicate.  Note that
1738	 asm insns will have insn_code -1 here.  */
1739      if (!safe_insn_predicate (insn_code, i, x))
1740	{
1741	  start_sequence ();
1742	  if (REG_P (x))
1743	    {
1744	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1745	      x = copy_to_reg (x);
1746	    }
1747	  else
1748	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
1749	  seq = get_insns ();
1750	  end_sequence ();
1751	  if (seq)
1752	    emit_insn_before (seq, insn);
1753	}
1754
1755      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1756      any_change = true;
1757    }
1758
1759  if (any_change)
1760    {
1761      /* Propagate operand changes into the duplicates.  */
1762      for (i = 0; i < recog_data.n_dups; ++i)
1763	*recog_data.dup_loc[i]
1764	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1765
1766      /* Force re-recognition of the instruction for validation.  */
1767      INSN_CODE (insn) = -1;
1768    }
1769
1770  if (asm_noperands (PATTERN (insn)) >= 0)
1771    {
1772      if (!check_asm_operands (PATTERN (insn)))
1773	{
1774	  error_for_asm (insn, "impossible constraint in %<asm%>");
1775	  /* For asm goto, instead of fixing up all the edges
1776	     just clear the template and clear input operands
1777	     (asm goto doesn't have any output operands).  */
1778	  if (JUMP_P (insn))
1779	    {
1780	      rtx asm_op = extract_asm_operands (PATTERN (insn));
1781	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1782	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1783	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1784	    }
1785	  else
1786	    delete_insn (insn);
1787	}
1788    }
1789  else
1790    {
1791      if (recog_memoized (insn) < 0)
1792	fatal_insn_not_found (insn);
1793    }
1794}
1795
1796/* Subroutine of instantiate_decls.  Given RTL representing a decl,
1797   do any instantiation required.  */
1798
1799void
1800instantiate_decl_rtl (rtx x)
1801{
1802  rtx addr;
1803
1804  if (x == 0)
1805    return;
1806
1807  /* If this is a CONCAT, recurse for the pieces.  */
1808  if (GET_CODE (x) == CONCAT)
1809    {
1810      instantiate_decl_rtl (XEXP (x, 0));
1811      instantiate_decl_rtl (XEXP (x, 1));
1812      return;
1813    }
1814
1815  /* If this is not a MEM, no need to do anything.  Similarly if the
1816     address is a constant or a register that is not a virtual register.  */
1817  if (!MEM_P (x))
1818    return;
1819
1820  addr = XEXP (x, 0);
1821  if (CONSTANT_P (addr)
1822      || (REG_P (addr)
1823	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1824	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1825    return;
1826
1827  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1828}
1829
1830/* Helper for instantiate_decls called via walk_tree: Process all decls
1831   in the given DECL_VALUE_EXPR.  */
1832
1833static tree
1834instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1835{
1836  tree t = *tp;
1837  if (! EXPR_P (t))
1838    {
1839      *walk_subtrees = 0;
1840      if (DECL_P (t))
1841	{
1842	  if (DECL_RTL_SET_P (t))
1843	    instantiate_decl_rtl (DECL_RTL (t));
1844	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1845	      && DECL_INCOMING_RTL (t))
1846	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1847	  if ((TREE_CODE (t) == VAR_DECL
1848	       || TREE_CODE (t) == RESULT_DECL)
1849	      && DECL_HAS_VALUE_EXPR_P (t))
1850	    {
1851	      tree v = DECL_VALUE_EXPR (t);
1852	      walk_tree (&v, instantiate_expr, NULL, NULL);
1853	    }
1854	}
1855    }
1856  return NULL;
1857}
1858
1859/* Subroutine of instantiate_decls: Process all decls in the given
1860   BLOCK node and all its subblocks.  */
1861
1862static void
1863instantiate_decls_1 (tree let)
1864{
1865  tree t;
1866
1867  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1868    {
1869      if (DECL_RTL_SET_P (t))
1870	instantiate_decl_rtl (DECL_RTL (t));
1871      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1872	{
1873	  tree v = DECL_VALUE_EXPR (t);
1874	  walk_tree (&v, instantiate_expr, NULL, NULL);
1875	}
1876    }
1877
1878  /* Process all subblocks.  */
1879  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1880    instantiate_decls_1 (t);
1881}
1882
1883/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1884   all virtual registers in their DECL_RTL's.  */
1885
1886static void
1887instantiate_decls (tree fndecl)
1888{
1889  tree decl;
1890  unsigned ix;
1891
1892  /* Process all parameters of the function.  */
1893  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1894    {
1895      instantiate_decl_rtl (DECL_RTL (decl));
1896      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1897      if (DECL_HAS_VALUE_EXPR_P (decl))
1898	{
1899	  tree v = DECL_VALUE_EXPR (decl);
1900	  walk_tree (&v, instantiate_expr, NULL, NULL);
1901	}
1902    }
1903
1904  if ((decl = DECL_RESULT (fndecl))
1905      && TREE_CODE (decl) == RESULT_DECL)
1906    {
1907      if (DECL_RTL_SET_P (decl))
1908	instantiate_decl_rtl (DECL_RTL (decl));
1909      if (DECL_HAS_VALUE_EXPR_P (decl))
1910	{
1911	  tree v = DECL_VALUE_EXPR (decl);
1912	  walk_tree (&v, instantiate_expr, NULL, NULL);
1913	}
1914    }
1915
1916  /* Process the saved static chain if it exists.  */
1917  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1918  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1919    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1920
1921  /* Now process all variables defined in the function or its subblocks.  */
1922  instantiate_decls_1 (DECL_INITIAL (fndecl));
1923
1924  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1925    if (DECL_RTL_SET_P (decl))
1926      instantiate_decl_rtl (DECL_RTL (decl));
1927  vec_free (cfun->local_decls);
1928}
1929
1930/* Pass through the INSNS of function FNDECL and convert virtual register
1931   references to hard register references.  */
1932
1933static unsigned int
1934instantiate_virtual_regs (void)
1935{
1936  rtx_insn *insn;
1937
1938  /* Compute the offsets to use for this function.  */
1939  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1940  var_offset = STARTING_FRAME_OFFSET;
1941  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1942  out_arg_offset = STACK_POINTER_OFFSET;
1943#ifdef FRAME_POINTER_CFA_OFFSET
1944  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1945#else
1946  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1947#endif
1948
1949  /* Initialize recognition, indicating that volatile is OK.  */
1950  init_recog ();
1951
1952  /* Scan through all the insns, instantiating every virtual register still
1953     present.  */
1954  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1955    if (INSN_P (insn))
1956      {
1957	/* These patterns in the instruction stream can never be recognized.
1958	   Fortunately, they shouldn't contain virtual registers either.  */
1959        if (GET_CODE (PATTERN (insn)) == USE
1960	    || GET_CODE (PATTERN (insn)) == CLOBBER
1961	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1962	  continue;
1963	else if (DEBUG_INSN_P (insn))
1964	  instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1965	else
1966	  instantiate_virtual_regs_in_insn (insn);
1967
1968	if (insn->deleted ())
1969	  continue;
1970
1971	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1972
1973	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1974	if (CALL_P (insn))
1975	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1976      }
1977
1978  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1979  instantiate_decls (current_function_decl);
1980
1981  targetm.instantiate_decls ();
1982
1983  /* Indicate that, from now on, assign_stack_local should use
1984     frame_pointer_rtx.  */
1985  virtuals_instantiated = 1;
1986
1987  return 0;
1988}
1989
1990namespace {
1991
1992const pass_data pass_data_instantiate_virtual_regs =
1993{
1994  RTL_PASS, /* type */
1995  "vregs", /* name */
1996  OPTGROUP_NONE, /* optinfo_flags */
1997  TV_NONE, /* tv_id */
1998  0, /* properties_required */
1999  0, /* properties_provided */
2000  0, /* properties_destroyed */
2001  0, /* todo_flags_start */
2002  0, /* todo_flags_finish */
2003};
2004
2005class pass_instantiate_virtual_regs : public rtl_opt_pass
2006{
2007public:
2008  pass_instantiate_virtual_regs (gcc::context *ctxt)
2009    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2010  {}
2011
2012  /* opt_pass methods: */
2013  virtual unsigned int execute (function *)
2014    {
2015      return instantiate_virtual_regs ();
2016    }
2017
2018}; // class pass_instantiate_virtual_regs
2019
2020} // anon namespace
2021
2022rtl_opt_pass *
2023make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2024{
2025  return new pass_instantiate_virtual_regs (ctxt);
2026}
2027
2028
2029/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2030   This means a type for which function calls must pass an address to the
2031   function or get an address back from the function.
2032   EXP may be a type node or an expression (whose type is tested).  */
2033
2034int
2035aggregate_value_p (const_tree exp, const_tree fntype)
2036{
2037  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2038  int i, regno, nregs;
2039  rtx reg;
2040
2041  if (fntype)
2042    switch (TREE_CODE (fntype))
2043      {
2044      case CALL_EXPR:
2045	{
2046	  tree fndecl = get_callee_fndecl (fntype);
2047	  if (fndecl)
2048	    fntype = TREE_TYPE (fndecl);
2049	  else if (CALL_EXPR_FN (fntype))
2050	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2051	  else
2052	    /* For internal functions, assume nothing needs to be
2053	       returned in memory.  */
2054	    return 0;
2055	}
2056	break;
2057      case FUNCTION_DECL:
2058	fntype = TREE_TYPE (fntype);
2059	break;
2060      case FUNCTION_TYPE:
2061      case METHOD_TYPE:
2062        break;
2063      case IDENTIFIER_NODE:
2064	fntype = NULL_TREE;
2065	break;
2066      default:
2067	/* We don't expect other tree types here.  */
2068	gcc_unreachable ();
2069      }
2070
2071  if (VOID_TYPE_P (type))
2072    return 0;
2073
2074  /* If a record should be passed the same as its first (and only) member
2075     don't pass it as an aggregate.  */
2076  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2077    return aggregate_value_p (first_field (type), fntype);
2078
2079  /* If the front end has decided that this needs to be passed by
2080     reference, do so.  */
2081  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2082      && DECL_BY_REFERENCE (exp))
2083    return 1;
2084
2085  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
2086  if (fntype && TREE_ADDRESSABLE (fntype))
2087    return 1;
2088
2089  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2090     and thus can't be returned in registers.  */
2091  if (TREE_ADDRESSABLE (type))
2092    return 1;
2093
2094  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2095    return 1;
2096
2097  if (targetm.calls.return_in_memory (type, fntype))
2098    return 1;
2099
2100  /* Make sure we have suitable call-clobbered regs to return
2101     the value in; if not, we must return it in memory.  */
2102  reg = hard_function_value (type, 0, fntype, 0);
2103
2104  /* If we have something other than a REG (e.g. a PARALLEL), then assume
2105     it is OK.  */
2106  if (!REG_P (reg))
2107    return 0;
2108
2109  regno = REGNO (reg);
2110  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2111  for (i = 0; i < nregs; i++)
2112    if (! call_used_regs[regno + i])
2113      return 1;
2114
2115  return 0;
2116}
2117
2118/* Return true if we should assign DECL a pseudo register; false if it
2119   should live on the local stack.  */
2120
2121bool
2122use_register_for_decl (const_tree decl)
2123{
2124  if (!targetm.calls.allocate_stack_slots_for_args ())
2125    return true;
2126
2127  /* Honor volatile.  */
2128  if (TREE_SIDE_EFFECTS (decl))
2129    return false;
2130
2131  /* Honor addressability.  */
2132  if (TREE_ADDRESSABLE (decl))
2133    return false;
2134
2135  /* Decl is implicitly addressible by bound stores and loads
2136     if it is an aggregate holding bounds.  */
2137  if (chkp_function_instrumented_p (current_function_decl)
2138      && TREE_TYPE (decl)
2139      && !BOUNDED_P (decl)
2140      && chkp_type_has_pointer (TREE_TYPE (decl)))
2141    return false;
2142
2143  /* Only register-like things go in registers.  */
2144  if (DECL_MODE (decl) == BLKmode)
2145    return false;
2146
2147  /* If -ffloat-store specified, don't put explicit float variables
2148     into registers.  */
2149  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2150     propagates values across these stores, and it probably shouldn't.  */
2151  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2152    return false;
2153
2154  /* If we're not interested in tracking debugging information for
2155     this decl, then we can certainly put it in a register.  */
2156  if (DECL_IGNORED_P (decl))
2157    return true;
2158
2159  if (optimize)
2160    return true;
2161
2162  if (!DECL_REGISTER (decl))
2163    return false;
2164
2165  switch (TREE_CODE (TREE_TYPE (decl)))
2166    {
2167    case RECORD_TYPE:
2168    case UNION_TYPE:
2169    case QUAL_UNION_TYPE:
2170      /* When not optimizing, disregard register keyword for variables with
2171	 types containing methods, otherwise the methods won't be callable
2172	 from the debugger.  */
2173      if (TYPE_METHODS (TREE_TYPE (decl)))
2174	return false;
2175      break;
2176    default:
2177      break;
2178    }
2179
2180  return true;
2181}
2182
2183/* Return true if TYPE should be passed by invisible reference.  */
2184
2185bool
2186pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
2187		   tree type, bool named_arg)
2188{
2189  if (type)
2190    {
2191      /* If this type contains non-trivial constructors, then it is
2192	 forbidden for the middle-end to create any new copies.  */
2193      if (TREE_ADDRESSABLE (type))
2194	return true;
2195
2196      /* GCC post 3.4 passes *all* variable sized types by reference.  */
2197      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2198	return true;
2199
2200      /* If a record type should be passed the same as its first (and only)
2201	 member, use the type and mode of that member.  */
2202      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2203	{
2204	  type = TREE_TYPE (first_field (type));
2205	  mode = TYPE_MODE (type);
2206	}
2207    }
2208
2209  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2210					  type, named_arg);
2211}
2212
2213/* Return true if TYPE, which is passed by reference, should be callee
2214   copied instead of caller copied.  */
2215
2216bool
2217reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
2218			 tree type, bool named_arg)
2219{
2220  if (type && TREE_ADDRESSABLE (type))
2221    return false;
2222  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2223				      named_arg);
2224}
2225
2226/* Structures to communicate between the subroutines of assign_parms.
2227   The first holds data persistent across all parameters, the second
2228   is cleared out for each parameter.  */
2229
2230struct assign_parm_data_all
2231{
2232  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2233     should become a job of the target or otherwise encapsulated.  */
2234  CUMULATIVE_ARGS args_so_far_v;
2235  cumulative_args_t args_so_far;
2236  struct args_size stack_args_size;
2237  tree function_result_decl;
2238  tree orig_fnargs;
2239  rtx_insn *first_conversion_insn;
2240  rtx_insn *last_conversion_insn;
2241  HOST_WIDE_INT pretend_args_size;
2242  HOST_WIDE_INT extra_pretend_bytes;
2243  int reg_parm_stack_space;
2244};
2245
2246struct assign_parm_data_one
2247{
2248  tree nominal_type;
2249  tree passed_type;
2250  rtx entry_parm;
2251  rtx stack_parm;
2252  machine_mode nominal_mode;
2253  machine_mode passed_mode;
2254  machine_mode promoted_mode;
2255  struct locate_and_pad_arg_data locate;
2256  int partial;
2257  BOOL_BITFIELD named_arg : 1;
2258  BOOL_BITFIELD passed_pointer : 1;
2259  BOOL_BITFIELD on_stack : 1;
2260  BOOL_BITFIELD loaded_in_reg : 1;
2261};
2262
2263struct bounds_parm_data
2264{
2265  assign_parm_data_one parm_data;
2266  tree bounds_parm;
2267  tree ptr_parm;
2268  rtx ptr_entry;
2269  int bound_no;
2270};
2271
2272/* A subroutine of assign_parms.  Initialize ALL.  */
2273
2274static void
2275assign_parms_initialize_all (struct assign_parm_data_all *all)
2276{
2277  tree fntype ATTRIBUTE_UNUSED;
2278
2279  memset (all, 0, sizeof (*all));
2280
2281  fntype = TREE_TYPE (current_function_decl);
2282
2283#ifdef INIT_CUMULATIVE_INCOMING_ARGS
2284  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2285#else
2286  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2287			current_function_decl, -1);
2288#endif
2289  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2290
2291#ifdef INCOMING_REG_PARM_STACK_SPACE
2292  all->reg_parm_stack_space
2293    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2294#endif
2295}
2296
2297/* If ARGS contains entries with complex types, split the entry into two
2298   entries of the component type.  Return a new list of substitutions are
2299   needed, else the old list.  */
2300
2301static void
2302split_complex_args (vec<tree> *args)
2303{
2304  unsigned i;
2305  tree p;
2306
2307  FOR_EACH_VEC_ELT (*args, i, p)
2308    {
2309      tree type = TREE_TYPE (p);
2310      if (TREE_CODE (type) == COMPLEX_TYPE
2311	  && targetm.calls.split_complex_arg (type))
2312	{
2313	  tree decl;
2314	  tree subtype = TREE_TYPE (type);
2315	  bool addressable = TREE_ADDRESSABLE (p);
2316
2317	  /* Rewrite the PARM_DECL's type with its component.  */
2318	  p = copy_node (p);
2319	  TREE_TYPE (p) = subtype;
2320	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2321	  DECL_MODE (p) = VOIDmode;
2322	  DECL_SIZE (p) = NULL;
2323	  DECL_SIZE_UNIT (p) = NULL;
2324	  /* If this arg must go in memory, put it in a pseudo here.
2325	     We can't allow it to go in memory as per normal parms,
2326	     because the usual place might not have the imag part
2327	     adjacent to the real part.  */
2328	  DECL_ARTIFICIAL (p) = addressable;
2329	  DECL_IGNORED_P (p) = addressable;
2330	  TREE_ADDRESSABLE (p) = 0;
2331	  layout_decl (p, 0);
2332	  (*args)[i] = p;
2333
2334	  /* Build a second synthetic decl.  */
2335	  decl = build_decl (EXPR_LOCATION (p),
2336			     PARM_DECL, NULL_TREE, subtype);
2337	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2338	  DECL_ARTIFICIAL (decl) = addressable;
2339	  DECL_IGNORED_P (decl) = addressable;
2340	  layout_decl (decl, 0);
2341	  args->safe_insert (++i, decl);
2342	}
2343    }
2344}
2345
2346/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2347   the hidden struct return argument, and (abi willing) complex args.
2348   Return the new parameter list.  */
2349
2350static vec<tree>
2351assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2352{
2353  tree fndecl = current_function_decl;
2354  tree fntype = TREE_TYPE (fndecl);
2355  vec<tree> fnargs = vNULL;
2356  tree arg;
2357
2358  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2359    fnargs.safe_push (arg);
2360
2361  all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2362
2363  /* If struct value address is treated as the first argument, make it so.  */
2364  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2365      && ! cfun->returns_pcc_struct
2366      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2367    {
2368      tree type = build_pointer_type (TREE_TYPE (fntype));
2369      tree decl;
2370
2371      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2372			 PARM_DECL, get_identifier (".result_ptr"), type);
2373      DECL_ARG_TYPE (decl) = type;
2374      DECL_ARTIFICIAL (decl) = 1;
2375      DECL_NAMELESS (decl) = 1;
2376      TREE_CONSTANT (decl) = 1;
2377
2378      DECL_CHAIN (decl) = all->orig_fnargs;
2379      all->orig_fnargs = decl;
2380      fnargs.safe_insert (0, decl);
2381
2382      all->function_result_decl = decl;
2383
2384      /* If function is instrumented then bounds of the
2385	 passed structure address is the second argument.  */
2386      if (chkp_function_instrumented_p (fndecl))
2387	{
2388	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2389			     PARM_DECL, get_identifier (".result_bnd"),
2390			     pointer_bounds_type_node);
2391	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2392	  DECL_ARTIFICIAL (decl) = 1;
2393	  DECL_NAMELESS (decl) = 1;
2394	  TREE_CONSTANT (decl) = 1;
2395
2396	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2397	  DECL_CHAIN (all->orig_fnargs) = decl;
2398	  fnargs.safe_insert (1, decl);
2399	}
2400    }
2401
2402  /* If the target wants to split complex arguments into scalars, do so.  */
2403  if (targetm.calls.split_complex_arg)
2404    split_complex_args (&fnargs);
2405
2406  return fnargs;
2407}
2408
2409/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2410   data for the parameter.  Incorporate ABI specifics such as pass-by-
2411   reference and type promotion.  */
2412
2413static void
2414assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2415			     struct assign_parm_data_one *data)
2416{
2417  tree nominal_type, passed_type;
2418  machine_mode nominal_mode, passed_mode, promoted_mode;
2419  int unsignedp;
2420
2421  memset (data, 0, sizeof (*data));
2422
2423  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
2424  if (!cfun->stdarg)
2425    data->named_arg = 1;  /* No variadic parms.  */
2426  else if (DECL_CHAIN (parm))
2427    data->named_arg = 1;  /* Not the last non-variadic parm. */
2428  else if (targetm.calls.strict_argument_naming (all->args_so_far))
2429    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2430  else
2431    data->named_arg = 0;  /* Treat as variadic.  */
2432
2433  nominal_type = TREE_TYPE (parm);
2434  passed_type = DECL_ARG_TYPE (parm);
2435
2436  /* Look out for errors propagating this far.  Also, if the parameter's
2437     type is void then its value doesn't matter.  */
2438  if (TREE_TYPE (parm) == error_mark_node
2439      /* This can happen after weird syntax errors
2440	 or if an enum type is defined among the parms.  */
2441      || TREE_CODE (parm) != PARM_DECL
2442      || passed_type == NULL
2443      || VOID_TYPE_P (nominal_type))
2444    {
2445      nominal_type = passed_type = void_type_node;
2446      nominal_mode = passed_mode = promoted_mode = VOIDmode;
2447      goto egress;
2448    }
2449
2450  /* Find mode of arg as it is passed, and mode of arg as it should be
2451     during execution of this function.  */
2452  passed_mode = TYPE_MODE (passed_type);
2453  nominal_mode = TYPE_MODE (nominal_type);
2454
2455  /* If the parm is to be passed as a transparent union or record, use the
2456     type of the first field for the tests below.  We have already verified
2457     that the modes are the same.  */
2458  if ((TREE_CODE (passed_type) == UNION_TYPE
2459       || TREE_CODE (passed_type) == RECORD_TYPE)
2460      && TYPE_TRANSPARENT_AGGR (passed_type))
2461    passed_type = TREE_TYPE (first_field (passed_type));
2462
2463  /* See if this arg was passed by invisible reference.  */
2464  if (pass_by_reference (&all->args_so_far_v, passed_mode,
2465			 passed_type, data->named_arg))
2466    {
2467      passed_type = nominal_type = build_pointer_type (passed_type);
2468      data->passed_pointer = true;
2469      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2470    }
2471
2472  /* Find mode as it is passed by the ABI.  */
2473  unsignedp = TYPE_UNSIGNED (passed_type);
2474  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2475				         TREE_TYPE (current_function_decl), 0);
2476
2477 egress:
2478  data->nominal_type = nominal_type;
2479  data->passed_type = passed_type;
2480  data->nominal_mode = nominal_mode;
2481  data->passed_mode = passed_mode;
2482  data->promoted_mode = promoted_mode;
2483}
2484
2485/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2486
2487static void
2488assign_parms_setup_varargs (struct assign_parm_data_all *all,
2489			    struct assign_parm_data_one *data, bool no_rtl)
2490{
2491  int varargs_pretend_bytes = 0;
2492
2493  targetm.calls.setup_incoming_varargs (all->args_so_far,
2494					data->promoted_mode,
2495					data->passed_type,
2496					&varargs_pretend_bytes, no_rtl);
2497
2498  /* If the back-end has requested extra stack space, record how much is
2499     needed.  Do not change pretend_args_size otherwise since it may be
2500     nonzero from an earlier partial argument.  */
2501  if (varargs_pretend_bytes > 0)
2502    all->pretend_args_size = varargs_pretend_bytes;
2503}
2504
2505/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2506   the incoming location of the current parameter.  */
2507
2508static void
2509assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2510			    struct assign_parm_data_one *data)
2511{
2512  HOST_WIDE_INT pretend_bytes = 0;
2513  rtx entry_parm;
2514  bool in_regs;
2515
2516  if (data->promoted_mode == VOIDmode)
2517    {
2518      data->entry_parm = data->stack_parm = const0_rtx;
2519      return;
2520    }
2521
2522  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2523						    data->promoted_mode,
2524						    data->passed_type,
2525						    data->named_arg);
2526
2527  if (entry_parm == 0)
2528    data->promoted_mode = data->passed_mode;
2529
2530  /* Determine parm's home in the stack, in case it arrives in the stack
2531     or we should pretend it did.  Compute the stack position and rtx where
2532     the argument arrives and its size.
2533
2534     There is one complexity here:  If this was a parameter that would
2535     have been passed in registers, but wasn't only because it is
2536     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2537     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2538     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2539     as it was the previous time.  */
2540  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2541#ifdef STACK_PARMS_IN_REG_PARM_AREA
2542  in_regs = true;
2543#endif
2544  if (!in_regs && !data->named_arg)
2545    {
2546      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2547	{
2548	  rtx tem;
2549	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
2550						     data->promoted_mode,
2551						     data->passed_type, true);
2552	  in_regs = tem != NULL;
2553	}
2554    }
2555
2556  /* If this parameter was passed both in registers and in the stack, use
2557     the copy on the stack.  */
2558  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2559					data->passed_type))
2560    entry_parm = 0;
2561
2562  if (entry_parm)
2563    {
2564      int partial;
2565
2566      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2567						 data->promoted_mode,
2568						 data->passed_type,
2569						 data->named_arg);
2570      data->partial = partial;
2571
2572      /* The caller might already have allocated stack space for the
2573	 register parameters.  */
2574      if (partial != 0 && all->reg_parm_stack_space == 0)
2575	{
2576	  /* Part of this argument is passed in registers and part
2577	     is passed on the stack.  Ask the prologue code to extend
2578	     the stack part so that we can recreate the full value.
2579
2580	     PRETEND_BYTES is the size of the registers we need to store.
2581	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2582	     stack space that the prologue should allocate.
2583
2584	     Internally, gcc assumes that the argument pointer is aligned
2585	     to STACK_BOUNDARY bits.  This is used both for alignment
2586	     optimizations (see init_emit) and to locate arguments that are
2587	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2588	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2589	     a stack boundary.  */
2590
2591	  /* We assume at most one partial arg, and it must be the first
2592	     argument on the stack.  */
2593	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2594
2595	  pretend_bytes = partial;
2596	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2597
2598	  /* We want to align relative to the actual stack pointer, so
2599	     don't include this in the stack size until later.  */
2600	  all->extra_pretend_bytes = all->pretend_args_size;
2601	}
2602    }
2603
2604  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2605		       all->reg_parm_stack_space,
2606		       entry_parm ? data->partial : 0, current_function_decl,
2607		       &all->stack_args_size, &data->locate);
2608
2609  /* Update parm_stack_boundary if this parameter is passed in the
2610     stack.  */
2611  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2612    crtl->parm_stack_boundary = data->locate.boundary;
2613
2614  /* Adjust offsets to include the pretend args.  */
2615  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2616  data->locate.slot_offset.constant += pretend_bytes;
2617  data->locate.offset.constant += pretend_bytes;
2618
2619  data->entry_parm = entry_parm;
2620}
2621
2622/* A subroutine of assign_parms.  If there is actually space on the stack
2623   for this parm, count it in stack_args_size and return true.  */
2624
2625static bool
2626assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2627			   struct assign_parm_data_one *data)
2628{
2629  /* Bounds are never passed on the stack to keep compatibility
2630     with not instrumented code.  */
2631  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2632    return false;
2633  /* Trivially true if we've no incoming register.  */
2634  else if (data->entry_parm == NULL)
2635    ;
2636  /* Also true if we're partially in registers and partially not,
2637     since we've arranged to drop the entire argument on the stack.  */
2638  else if (data->partial != 0)
2639    ;
2640  /* Also true if the target says that it's passed in both registers
2641     and on the stack.  */
2642  else if (GET_CODE (data->entry_parm) == PARALLEL
2643	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2644    ;
2645  /* Also true if the target says that there's stack allocated for
2646     all register parameters.  */
2647  else if (all->reg_parm_stack_space > 0)
2648    ;
2649  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2650  else
2651    return false;
2652
2653  all->stack_args_size.constant += data->locate.size.constant;
2654  if (data->locate.size.var)
2655    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2656
2657  return true;
2658}
2659
2660/* A subroutine of assign_parms.  Given that this parameter is allocated
2661   stack space by the ABI, find it.  */
2662
2663static void
2664assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2665{
2666  rtx offset_rtx, stack_parm;
2667  unsigned int align, boundary;
2668
2669  /* If we're passing this arg using a reg, make its stack home the
2670     aligned stack slot.  */
2671  if (data->entry_parm)
2672    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2673  else
2674    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2675
2676  stack_parm = crtl->args.internal_arg_pointer;
2677  if (offset_rtx != const0_rtx)
2678    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2679  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2680
2681  if (!data->passed_pointer)
2682    {
2683      set_mem_attributes (stack_parm, parm, 1);
2684      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2685	 while promoted mode's size is needed.  */
2686      if (data->promoted_mode != BLKmode
2687	  && data->promoted_mode != DECL_MODE (parm))
2688	{
2689	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2690	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2691	    {
2692	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
2693						  data->promoted_mode);
2694	      if (offset)
2695		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2696	    }
2697	}
2698    }
2699
2700  boundary = data->locate.boundary;
2701  align = BITS_PER_UNIT;
2702
2703  /* If we're padding upward, we know that the alignment of the slot
2704     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2705     intentionally forcing upward padding.  Otherwise we have to come
2706     up with a guess at the alignment based on OFFSET_RTX.  */
2707  if (data->locate.where_pad != downward || data->entry_parm)
2708    align = boundary;
2709  else if (CONST_INT_P (offset_rtx))
2710    {
2711      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2712      align = align & -align;
2713    }
2714  set_mem_align (stack_parm, align);
2715
2716  if (data->entry_parm)
2717    set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2718
2719  data->stack_parm = stack_parm;
2720}
2721
2722/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
2723   always valid and contiguous.  */
2724
2725static void
2726assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2727{
2728  rtx entry_parm = data->entry_parm;
2729  rtx stack_parm = data->stack_parm;
2730
2731  /* If this parm was passed part in regs and part in memory, pretend it
2732     arrived entirely in memory by pushing the register-part onto the stack.
2733     In the special case of a DImode or DFmode that is split, we could put
2734     it together in a pseudoreg directly, but for now that's not worth
2735     bothering with.  */
2736  if (data->partial != 0)
2737    {
2738      /* Handle calls that pass values in multiple non-contiguous
2739	 locations.  The Irix 6 ABI has examples of this.  */
2740      if (GET_CODE (entry_parm) == PARALLEL)
2741	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2742			  data->passed_type,
2743			  int_size_in_bytes (data->passed_type));
2744      else
2745	{
2746	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2747	  move_block_from_reg (REGNO (entry_parm),
2748			       validize_mem (copy_rtx (stack_parm)),
2749			       data->partial / UNITS_PER_WORD);
2750	}
2751
2752      entry_parm = stack_parm;
2753    }
2754
2755  /* If we didn't decide this parm came in a register, by default it came
2756     on the stack.  */
2757  else if (entry_parm == NULL)
2758    entry_parm = stack_parm;
2759
2760  /* When an argument is passed in multiple locations, we can't make use
2761     of this information, but we can save some copying if the whole argument
2762     is passed in a single register.  */
2763  else if (GET_CODE (entry_parm) == PARALLEL
2764	   && data->nominal_mode != BLKmode
2765	   && data->passed_mode != BLKmode)
2766    {
2767      size_t i, len = XVECLEN (entry_parm, 0);
2768
2769      for (i = 0; i < len; i++)
2770	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2771	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2772	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2773		== data->passed_mode)
2774	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2775	  {
2776	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2777	    break;
2778	  }
2779    }
2780
2781  data->entry_parm = entry_parm;
2782}
2783
2784/* A subroutine of assign_parms.  Reconstitute any values which were
2785   passed in multiple registers and would fit in a single register.  */
2786
2787static void
2788assign_parm_remove_parallels (struct assign_parm_data_one *data)
2789{
2790  rtx entry_parm = data->entry_parm;
2791
2792  /* Convert the PARALLEL to a REG of the same mode as the parallel.
2793     This can be done with register operations rather than on the
2794     stack, even if we will store the reconstituted parameter on the
2795     stack later.  */
2796  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2797    {
2798      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2799      emit_group_store (parmreg, entry_parm, data->passed_type,
2800			GET_MODE_SIZE (GET_MODE (entry_parm)));
2801      entry_parm = parmreg;
2802    }
2803
2804  data->entry_parm = entry_parm;
2805}
2806
2807/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
2808   always valid and properly aligned.  */
2809
2810static void
2811assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2812{
2813  rtx stack_parm = data->stack_parm;
2814
2815  /* If we can't trust the parm stack slot to be aligned enough for its
2816     ultimate type, don't use that slot after entry.  We'll make another
2817     stack slot, if we need one.  */
2818  if (stack_parm
2819      && ((STRICT_ALIGNMENT
2820	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2821	  || (data->nominal_type
2822	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2823	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2824    stack_parm = NULL;
2825
2826  /* If parm was passed in memory, and we need to convert it on entry,
2827     don't store it back in that same slot.  */
2828  else if (data->entry_parm == stack_parm
2829	   && data->nominal_mode != BLKmode
2830	   && data->nominal_mode != data->passed_mode)
2831    stack_parm = NULL;
2832
2833  /* If stack protection is in effect for this function, don't leave any
2834     pointers in their passed stack slots.  */
2835  else if (crtl->stack_protect_guard
2836	   && (flag_stack_protect == 2
2837	       || data->passed_pointer
2838	       || POINTER_TYPE_P (data->nominal_type)))
2839    stack_parm = NULL;
2840
2841  data->stack_parm = stack_parm;
2842}
2843
2844/* A subroutine of assign_parms.  Return true if the current parameter
2845   should be stored as a BLKmode in the current frame.  */
2846
2847static bool
2848assign_parm_setup_block_p (struct assign_parm_data_one *data)
2849{
2850  if (data->nominal_mode == BLKmode)
2851    return true;
2852  if (GET_MODE (data->entry_parm) == BLKmode)
2853    return true;
2854
2855#ifdef BLOCK_REG_PADDING
2856  /* Only assign_parm_setup_block knows how to deal with register arguments
2857     that are padded at the least significant end.  */
2858  if (REG_P (data->entry_parm)
2859      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2860      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2861	  == (BYTES_BIG_ENDIAN ? upward : downward)))
2862    return true;
2863#endif
2864
2865  return false;
2866}
2867
2868/* A subroutine of assign_parms.  Arrange for the parameter to be
2869   present and valid in DATA->STACK_RTL.  */
2870
2871static void
2872assign_parm_setup_block (struct assign_parm_data_all *all,
2873			 tree parm, struct assign_parm_data_one *data)
2874{
2875  rtx entry_parm = data->entry_parm;
2876  rtx stack_parm = data->stack_parm;
2877  HOST_WIDE_INT size;
2878  HOST_WIDE_INT size_stored;
2879
2880  if (GET_CODE (entry_parm) == PARALLEL)
2881    entry_parm = emit_group_move_into_temps (entry_parm);
2882
2883  size = int_size_in_bytes (data->passed_type);
2884  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2885  if (stack_parm == 0)
2886    {
2887      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2888      stack_parm = assign_stack_local (BLKmode, size_stored,
2889				       DECL_ALIGN (parm));
2890      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2891	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2892      set_mem_attributes (stack_parm, parm, 1);
2893    }
2894
2895  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2896     calls that pass values in multiple non-contiguous locations.  */
2897  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2898    {
2899      rtx mem;
2900
2901      /* Note that we will be storing an integral number of words.
2902	 So we have to be careful to ensure that we allocate an
2903	 integral number of words.  We do this above when we call
2904	 assign_stack_local if space was not allocated in the argument
2905	 list.  If it was, this will not work if PARM_BOUNDARY is not
2906	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2907	 if it becomes a problem.  Exception is when BLKmode arrives
2908	 with arguments not conforming to word_mode.  */
2909
2910      if (data->stack_parm == 0)
2911	;
2912      else if (GET_CODE (entry_parm) == PARALLEL)
2913	;
2914      else
2915	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2916
2917      mem = validize_mem (copy_rtx (stack_parm));
2918
2919      /* Handle values in multiple non-contiguous locations.  */
2920      if (GET_CODE (entry_parm) == PARALLEL)
2921	{
2922	  push_to_sequence2 (all->first_conversion_insn,
2923			     all->last_conversion_insn);
2924	  emit_group_store (mem, entry_parm, data->passed_type, size);
2925	  all->first_conversion_insn = get_insns ();
2926	  all->last_conversion_insn = get_last_insn ();
2927	  end_sequence ();
2928	}
2929
2930      else if (size == 0)
2931	;
2932
2933      /* If SIZE is that of a mode no bigger than a word, just use
2934	 that mode's store operation.  */
2935      else if (size <= UNITS_PER_WORD)
2936	{
2937	  machine_mode mode
2938	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2939
2940	  if (mode != BLKmode
2941#ifdef BLOCK_REG_PADDING
2942	      && (size == UNITS_PER_WORD
2943		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2944		      != (BYTES_BIG_ENDIAN ? upward : downward)))
2945#endif
2946	      )
2947	    {
2948	      rtx reg;
2949
2950	      /* We are really truncating a word_mode value containing
2951		 SIZE bytes into a value of mode MODE.  If such an
2952		 operation requires no actual instructions, we can refer
2953		 to the value directly in mode MODE, otherwise we must
2954		 start with the register in word_mode and explicitly
2955		 convert it.  */
2956	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2957		reg = gen_rtx_REG (mode, REGNO (entry_parm));
2958	      else
2959		{
2960		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2961		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2962		}
2963	      emit_move_insn (change_address (mem, mode, 0), reg);
2964	    }
2965
2966	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2967	     machine must be aligned to the left before storing
2968	     to memory.  Note that the previous test doesn't
2969	     handle all cases (e.g. SIZE == 3).  */
2970	  else if (size != UNITS_PER_WORD
2971#ifdef BLOCK_REG_PADDING
2972		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2973		       == downward)
2974#else
2975		   && BYTES_BIG_ENDIAN
2976#endif
2977		   )
2978	    {
2979	      rtx tem, x;
2980	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2981	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2982
2983	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2984	      tem = change_address (mem, word_mode, 0);
2985	      emit_move_insn (tem, x);
2986	    }
2987	  else
2988	    move_block_from_reg (REGNO (entry_parm), mem,
2989				 size_stored / UNITS_PER_WORD);
2990	}
2991      else
2992	move_block_from_reg (REGNO (entry_parm), mem,
2993			     size_stored / UNITS_PER_WORD);
2994    }
2995  else if (data->stack_parm == 0)
2996    {
2997      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2998      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2999		       BLOCK_OP_NORMAL);
3000      all->first_conversion_insn = get_insns ();
3001      all->last_conversion_insn = get_last_insn ();
3002      end_sequence ();
3003    }
3004
3005  data->stack_parm = stack_parm;
3006  SET_DECL_RTL (parm, stack_parm);
3007}
3008
3009/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
3010   parameter.  Get it there.  Perform all ABI specified conversions.  */
3011
3012static void
3013assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3014		       struct assign_parm_data_one *data)
3015{
3016  rtx parmreg, validated_mem;
3017  rtx equiv_stack_parm;
3018  machine_mode promoted_nominal_mode;
3019  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3020  bool did_conversion = false;
3021  bool need_conversion, moved;
3022
3023  /* Store the parm in a pseudoregister during the function, but we may
3024     need to do it in a wider mode.  Using 2 here makes the result
3025     consistent with promote_decl_mode and thus expand_expr_real_1.  */
3026  promoted_nominal_mode
3027    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3028			     TREE_TYPE (current_function_decl), 2);
3029
3030  parmreg = gen_reg_rtx (promoted_nominal_mode);
3031
3032  if (!DECL_ARTIFICIAL (parm))
3033    mark_user_reg (parmreg);
3034
3035  /* If this was an item that we received a pointer to,
3036     set DECL_RTL appropriately.  */
3037  if (data->passed_pointer)
3038    {
3039      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3040      set_mem_attributes (x, parm, 1);
3041      SET_DECL_RTL (parm, x);
3042    }
3043  else
3044    SET_DECL_RTL (parm, parmreg);
3045
3046  assign_parm_remove_parallels (data);
3047
3048  /* Copy the value into the register, thus bridging between
3049     assign_parm_find_data_types and expand_expr_real_1.  */
3050
3051  equiv_stack_parm = data->stack_parm;
3052  validated_mem = validize_mem (copy_rtx (data->entry_parm));
3053
3054  need_conversion = (data->nominal_mode != data->passed_mode
3055		     || promoted_nominal_mode != data->promoted_mode);
3056  moved = false;
3057
3058  if (need_conversion
3059      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3060      && data->nominal_mode == data->passed_mode
3061      && data->nominal_mode == GET_MODE (data->entry_parm))
3062    {
3063      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3064	 mode, by the caller.  We now have to convert it to
3065	 NOMINAL_MODE, if different.  However, PARMREG may be in
3066	 a different mode than NOMINAL_MODE if it is being stored
3067	 promoted.
3068
3069	 If ENTRY_PARM is a hard register, it might be in a register
3070	 not valid for operating in its mode (e.g., an odd-numbered
3071	 register for a DFmode).  In that case, moves are the only
3072	 thing valid, so we can't do a convert from there.  This
3073	 occurs when the calling sequence allow such misaligned
3074	 usages.
3075
3076	 In addition, the conversion may involve a call, which could
3077	 clobber parameters which haven't been copied to pseudo
3078	 registers yet.
3079
3080	 First, we try to emit an insn which performs the necessary
3081	 conversion.  We verify that this insn does not clobber any
3082	 hard registers.  */
3083
3084      enum insn_code icode;
3085      rtx op0, op1;
3086
3087      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3088			    unsignedp);
3089
3090      op0 = parmreg;
3091      op1 = validated_mem;
3092      if (icode != CODE_FOR_nothing
3093	  && insn_operand_matches (icode, 0, op0)
3094	  && insn_operand_matches (icode, 1, op1))
3095	{
3096	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3097	  rtx_insn *insn, *insns;
3098	  rtx t = op1;
3099	  HARD_REG_SET hardregs;
3100
3101	  start_sequence ();
3102	  /* If op1 is a hard register that is likely spilled, first
3103	     force it into a pseudo, otherwise combiner might extend
3104	     its lifetime too much.  */
3105	  if (GET_CODE (t) == SUBREG)
3106	    t = SUBREG_REG (t);
3107	  if (REG_P (t)
3108	      && HARD_REGISTER_P (t)
3109	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3110	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3111	    {
3112	      t = gen_reg_rtx (GET_MODE (op1));
3113	      emit_move_insn (t, op1);
3114	    }
3115	  else
3116	    t = op1;
3117	  rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3118				     data->passed_mode, unsignedp);
3119	  emit_insn (pat);
3120	  insns = get_insns ();
3121
3122	  moved = true;
3123	  CLEAR_HARD_REG_SET (hardregs);
3124	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3125	    {
3126	      if (INSN_P (insn))
3127		note_stores (PATTERN (insn), record_hard_reg_sets,
3128			     &hardregs);
3129	      if (!hard_reg_set_empty_p (hardregs))
3130		moved = false;
3131	    }
3132
3133	  end_sequence ();
3134
3135	  if (moved)
3136	    {
3137	      emit_insn (insns);
3138	      if (equiv_stack_parm != NULL_RTX)
3139		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3140						  equiv_stack_parm);
3141	    }
3142	}
3143    }
3144
3145  if (moved)
3146    /* Nothing to do.  */
3147    ;
3148  else if (need_conversion)
3149    {
3150      /* We did not have an insn to convert directly, or the sequence
3151	 generated appeared unsafe.  We must first copy the parm to a
3152	 pseudo reg, and save the conversion until after all
3153	 parameters have been moved.  */
3154
3155      int save_tree_used;
3156      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3157
3158      emit_move_insn (tempreg, validated_mem);
3159
3160      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3161      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3162
3163      if (GET_CODE (tempreg) == SUBREG
3164	  && GET_MODE (tempreg) == data->nominal_mode
3165	  && REG_P (SUBREG_REG (tempreg))
3166	  && data->nominal_mode == data->passed_mode
3167	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3168	  && GET_MODE_SIZE (GET_MODE (tempreg))
3169	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3170	{
3171	  /* The argument is already sign/zero extended, so note it
3172	     into the subreg.  */
3173	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3174	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
3175	}
3176
3177      /* TREE_USED gets set erroneously during expand_assignment.  */
3178      save_tree_used = TREE_USED (parm);
3179      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3180      TREE_USED (parm) = save_tree_used;
3181      all->first_conversion_insn = get_insns ();
3182      all->last_conversion_insn = get_last_insn ();
3183      end_sequence ();
3184
3185      did_conversion = true;
3186    }
3187  else
3188    emit_move_insn (parmreg, validated_mem);
3189
3190  /* If we were passed a pointer but the actual value can safely live
3191     in a register, retrieve it and use it directly.  */
3192  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3193    {
3194      /* We can't use nominal_mode, because it will have been set to
3195	 Pmode above.  We must use the actual mode of the parm.  */
3196      if (use_register_for_decl (parm))
3197	{
3198	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3199	  mark_user_reg (parmreg);
3200	}
3201      else
3202	{
3203	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3204					    TYPE_MODE (TREE_TYPE (parm)),
3205					    TYPE_ALIGN (TREE_TYPE (parm)));
3206	  parmreg
3207	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3208				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3209				  align);
3210	  set_mem_attributes (parmreg, parm, 1);
3211	}
3212
3213      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3214	{
3215	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3216	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3217
3218	  push_to_sequence2 (all->first_conversion_insn,
3219			     all->last_conversion_insn);
3220	  emit_move_insn (tempreg, DECL_RTL (parm));
3221	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3222	  emit_move_insn (parmreg, tempreg);
3223	  all->first_conversion_insn = get_insns ();
3224	  all->last_conversion_insn = get_last_insn ();
3225	  end_sequence ();
3226
3227	  did_conversion = true;
3228	}
3229      else
3230	emit_move_insn (parmreg, DECL_RTL (parm));
3231
3232      SET_DECL_RTL (parm, parmreg);
3233
3234      /* STACK_PARM is the pointer, not the parm, and PARMREG is
3235	 now the parm.  */
3236      data->stack_parm = NULL;
3237    }
3238
3239  /* Mark the register as eliminable if we did no conversion and it was
3240     copied from memory at a fixed offset, and the arg pointer was not
3241     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
3242     offset formed an invalid address, such memory-equivalences as we
3243     make here would screw up life analysis for it.  */
3244  if (data->nominal_mode == data->passed_mode
3245      && !did_conversion
3246      && data->stack_parm != 0
3247      && MEM_P (data->stack_parm)
3248      && data->locate.offset.var == 0
3249      && reg_mentioned_p (virtual_incoming_args_rtx,
3250			  XEXP (data->stack_parm, 0)))
3251    {
3252      rtx_insn *linsn = get_last_insn ();
3253      rtx_insn *sinsn;
3254      rtx set;
3255
3256      /* Mark complex types separately.  */
3257      if (GET_CODE (parmreg) == CONCAT)
3258	{
3259	  machine_mode submode
3260	    = GET_MODE_INNER (GET_MODE (parmreg));
3261	  int regnor = REGNO (XEXP (parmreg, 0));
3262	  int regnoi = REGNO (XEXP (parmreg, 1));
3263	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3264	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
3265					  GET_MODE_SIZE (submode));
3266
3267	  /* Scan backwards for the set of the real and
3268	     imaginary parts.  */
3269	  for (sinsn = linsn; sinsn != 0;
3270	       sinsn = prev_nonnote_insn (sinsn))
3271	    {
3272	      set = single_set (sinsn);
3273	      if (set == 0)
3274		continue;
3275
3276	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
3277		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3278	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
3279		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3280	    }
3281	}
3282      else
3283	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3284    }
3285
3286  /* For pointer data type, suggest pointer register.  */
3287  if (POINTER_TYPE_P (TREE_TYPE (parm)))
3288    mark_reg_pointer (parmreg,
3289		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3290}
3291
3292/* A subroutine of assign_parms.  Allocate stack space to hold the current
3293   parameter.  Get it there.  Perform all ABI specified conversions.  */
3294
3295static void
3296assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3297		         struct assign_parm_data_one *data)
3298{
3299  /* Value must be stored in the stack slot STACK_PARM during function
3300     execution.  */
3301  bool to_conversion = false;
3302
3303  assign_parm_remove_parallels (data);
3304
3305  if (data->promoted_mode != data->nominal_mode)
3306    {
3307      /* Conversion is required.  */
3308      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3309
3310      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3311
3312      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3313      to_conversion = true;
3314
3315      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3316					  TYPE_UNSIGNED (TREE_TYPE (parm)));
3317
3318      if (data->stack_parm)
3319	{
3320	  int offset = subreg_lowpart_offset (data->nominal_mode,
3321					      GET_MODE (data->stack_parm));
3322	  /* ??? This may need a big-endian conversion on sparc64.  */
3323	  data->stack_parm
3324	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
3325	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3326	    set_mem_offset (data->stack_parm,
3327			    MEM_OFFSET (data->stack_parm) + offset);
3328	}
3329    }
3330
3331  if (data->entry_parm != data->stack_parm)
3332    {
3333      rtx src, dest;
3334
3335      if (data->stack_parm == 0)
3336	{
3337	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3338					    GET_MODE (data->entry_parm),
3339					    TYPE_ALIGN (data->passed_type));
3340	  data->stack_parm
3341	    = assign_stack_local (GET_MODE (data->entry_parm),
3342				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3343				  align);
3344	  set_mem_attributes (data->stack_parm, parm, 1);
3345	}
3346
3347      dest = validize_mem (copy_rtx (data->stack_parm));
3348      src = validize_mem (copy_rtx (data->entry_parm));
3349
3350      if (MEM_P (src))
3351	{
3352	  /* Use a block move to handle potentially misaligned entry_parm.  */
3353	  if (!to_conversion)
3354	    push_to_sequence2 (all->first_conversion_insn,
3355			       all->last_conversion_insn);
3356	  to_conversion = true;
3357
3358	  emit_block_move (dest, src,
3359			   GEN_INT (int_size_in_bytes (data->passed_type)),
3360			   BLOCK_OP_NORMAL);
3361	}
3362      else
3363	emit_move_insn (dest, src);
3364    }
3365
3366  if (to_conversion)
3367    {
3368      all->first_conversion_insn = get_insns ();
3369      all->last_conversion_insn = get_last_insn ();
3370      end_sequence ();
3371    }
3372
3373  SET_DECL_RTL (parm, data->stack_parm);
3374}
3375
3376/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
3377   undo the frobbing that we did in assign_parms_augmented_arg_list.  */
3378
3379static void
3380assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3381			      vec<tree> fnargs)
3382{
3383  tree parm;
3384  tree orig_fnargs = all->orig_fnargs;
3385  unsigned i = 0;
3386
3387  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3388    {
3389      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3390	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3391	{
3392	  rtx tmp, real, imag;
3393	  machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3394
3395	  real = DECL_RTL (fnargs[i]);
3396	  imag = DECL_RTL (fnargs[i + 1]);
3397	  if (inner != GET_MODE (real))
3398	    {
3399	      real = gen_lowpart_SUBREG (inner, real);
3400	      imag = gen_lowpart_SUBREG (inner, imag);
3401	    }
3402
3403	  if (TREE_ADDRESSABLE (parm))
3404	    {
3405	      rtx rmem, imem;
3406	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3407	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3408						DECL_MODE (parm),
3409						TYPE_ALIGN (TREE_TYPE (parm)));
3410
3411	      /* split_complex_arg put the real and imag parts in
3412		 pseudos.  Move them to memory.  */
3413	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
3414	      set_mem_attributes (tmp, parm, 1);
3415	      rmem = adjust_address_nv (tmp, inner, 0);
3416	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3417	      push_to_sequence2 (all->first_conversion_insn,
3418				 all->last_conversion_insn);
3419	      emit_move_insn (rmem, real);
3420	      emit_move_insn (imem, imag);
3421	      all->first_conversion_insn = get_insns ();
3422	      all->last_conversion_insn = get_last_insn ();
3423	      end_sequence ();
3424	    }
3425	  else
3426	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3427	  SET_DECL_RTL (parm, tmp);
3428
3429	  real = DECL_INCOMING_RTL (fnargs[i]);
3430	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3431	  if (inner != GET_MODE (real))
3432	    {
3433	      real = gen_lowpart_SUBREG (inner, real);
3434	      imag = gen_lowpart_SUBREG (inner, imag);
3435	    }
3436	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3437	  set_decl_incoming_rtl (parm, tmp, false);
3438	  i++;
3439	}
3440    }
3441}
3442
3443/* Load bounds of PARM from bounds table.  */
3444static void
3445assign_parm_load_bounds (struct assign_parm_data_one *data,
3446			 tree parm,
3447			 rtx entry,
3448			 unsigned bound_no)
3449{
3450  bitmap_iterator bi;
3451  unsigned i, offs = 0;
3452  int bnd_no = -1;
3453  rtx slot = NULL, ptr = NULL;
3454
3455  if (parm)
3456    {
3457      bitmap slots;
3458      bitmap_obstack_initialize (NULL);
3459      slots = BITMAP_ALLOC (NULL);
3460      chkp_find_bound_slots (TREE_TYPE (parm), slots);
3461      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3462	{
3463	  if (bound_no)
3464	    bound_no--;
3465	  else
3466	    {
3467	      bnd_no = i;
3468	      break;
3469	    }
3470	}
3471      BITMAP_FREE (slots);
3472      bitmap_obstack_release (NULL);
3473    }
3474
3475  /* We may have bounds not associated with any pointer.  */
3476  if (bnd_no != -1)
3477    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3478
3479  /* Find associated pointer.  */
3480  if (bnd_no == -1)
3481    {
3482      /* If bounds are not associated with any bounds,
3483	 then it is passed in a register or special slot.  */
3484      gcc_assert (data->entry_parm);
3485      ptr = const0_rtx;
3486    }
3487  else if (MEM_P (entry))
3488    slot = adjust_address (entry, Pmode, offs);
3489  else if (REG_P (entry))
3490    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3491  else if (GET_CODE (entry) == PARALLEL)
3492    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3493  else
3494    gcc_unreachable ();
3495  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3496							data->entry_parm);
3497}
3498
3499/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
3500
3501static void
3502assign_bounds (vec<bounds_parm_data> &bndargs,
3503	       struct assign_parm_data_all &all,
3504	       bool assign_regs, bool assign_special,
3505	       bool assign_bt)
3506{
3507  unsigned i, pass;
3508  bounds_parm_data *pbdata;
3509
3510  if (!bndargs.exists ())
3511    return;
3512
3513  /* We make few passes to store input bounds.  Firstly handle bounds
3514     passed in registers.  After that we load bounds passed in special
3515     slots.  Finally we load bounds from Bounds Table.  */
3516  for (pass = 0; pass < 3; pass++)
3517    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3518      {
3519	/* Pass 0 => regs only.  */
3520	if (pass == 0
3521	    && (!assign_regs
3522		||(!pbdata->parm_data.entry_parm
3523		   || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
3524	  continue;
3525	/* Pass 1 => slots only.  */
3526	else if (pass == 1
3527		 && (!assign_special
3528		     || (!pbdata->parm_data.entry_parm
3529			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3530	  continue;
3531	/* Pass 2 => BT only.  */
3532	else if (pass == 2
3533		 && (!assign_bt
3534		     || pbdata->parm_data.entry_parm))
3535	  continue;
3536
3537	if (!pbdata->parm_data.entry_parm
3538	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3539	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3540				   pbdata->ptr_entry, pbdata->bound_no);
3541
3542	set_decl_incoming_rtl (pbdata->bounds_parm,
3543			       pbdata->parm_data.entry_parm, false);
3544
3545	if (assign_parm_setup_block_p (&pbdata->parm_data))
3546	  assign_parm_setup_block (&all, pbdata->bounds_parm,
3547				   &pbdata->parm_data);
3548	else if (pbdata->parm_data.passed_pointer
3549		 || use_register_for_decl (pbdata->bounds_parm))
3550	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
3551				 &pbdata->parm_data);
3552	else
3553	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
3554				   &pbdata->parm_data);
3555      }
3556}
3557
3558/* Assign RTL expressions to the function's parameters.  This may involve
3559   copying them into registers and using those registers as the DECL_RTL.  */
3560
3561static void
3562assign_parms (tree fndecl)
3563{
3564  struct assign_parm_data_all all;
3565  tree parm;
3566  vec<tree> fnargs;
3567  unsigned i, bound_no = 0;
3568  tree last_arg = NULL;
3569  rtx last_arg_entry = NULL;
3570  vec<bounds_parm_data> bndargs = vNULL;
3571  bounds_parm_data bdata;
3572
3573  crtl->args.internal_arg_pointer
3574    = targetm.calls.internal_arg_pointer ();
3575
3576  assign_parms_initialize_all (&all);
3577  fnargs = assign_parms_augmented_arg_list (&all);
3578
3579  FOR_EACH_VEC_ELT (fnargs, i, parm)
3580    {
3581      struct assign_parm_data_one data;
3582
3583      /* Extract the type of PARM; adjust it according to ABI.  */
3584      assign_parm_find_data_types (&all, parm, &data);
3585
3586      /* Early out for errors and void parameters.  */
3587      if (data.passed_mode == VOIDmode)
3588	{
3589	  SET_DECL_RTL (parm, const0_rtx);
3590	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3591	  continue;
3592	}
3593
3594      /* Estimate stack alignment from parameter alignment.  */
3595      if (SUPPORTS_STACK_ALIGNMENT)
3596        {
3597          unsigned int align
3598	    = targetm.calls.function_arg_boundary (data.promoted_mode,
3599						   data.passed_type);
3600	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3601				     align);
3602	  if (TYPE_ALIGN (data.nominal_type) > align)
3603	    align = MINIMUM_ALIGNMENT (data.nominal_type,
3604				       TYPE_MODE (data.nominal_type),
3605				       TYPE_ALIGN (data.nominal_type));
3606	  if (crtl->stack_alignment_estimated < align)
3607	    {
3608	      gcc_assert (!crtl->stack_realign_processed);
3609	      crtl->stack_alignment_estimated = align;
3610	    }
3611	}
3612
3613      /* Find out where the parameter arrives in this function.  */
3614      assign_parm_find_entry_rtl (&all, &data);
3615
3616      /* Find out where stack space for this parameter might be.  */
3617      if (assign_parm_is_stack_parm (&all, &data))
3618	{
3619	  assign_parm_find_stack_rtl (parm, &data);
3620	  assign_parm_adjust_entry_rtl (&data);
3621	}
3622      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3623	{
3624	  /* Remember where last non bounds arg was passed in case
3625	     we have to load associated bounds for it from Bounds
3626	     Table.  */
3627	  last_arg = parm;
3628	  last_arg_entry = data.entry_parm;
3629	  bound_no = 0;
3630	}
3631      /* Record permanently how this parm was passed.  */
3632      if (data.passed_pointer)
3633	{
3634	  rtx incoming_rtl
3635	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3636			   data.entry_parm);
3637	  set_decl_incoming_rtl (parm, incoming_rtl, true);
3638	}
3639      else
3640	set_decl_incoming_rtl (parm, data.entry_parm, false);
3641
3642      /* Boudns should be loaded in the particular order to
3643	 have registers allocated correctly.  Collect info about
3644	 input bounds and load them later.  */
3645      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3646	{
3647	  /* Expect bounds in instrumented functions only.  */
3648	  gcc_assert (chkp_function_instrumented_p (fndecl));
3649
3650	  bdata.parm_data = data;
3651	  bdata.bounds_parm = parm;
3652	  bdata.ptr_parm = last_arg;
3653	  bdata.ptr_entry = last_arg_entry;
3654	  bdata.bound_no = bound_no;
3655	  bndargs.safe_push (bdata);
3656	}
3657      else
3658	{
3659	  assign_parm_adjust_stack_rtl (&data);
3660
3661	  if (assign_parm_setup_block_p (&data))
3662	    assign_parm_setup_block (&all, parm, &data);
3663	  else if (data.passed_pointer || use_register_for_decl (parm))
3664	    assign_parm_setup_reg (&all, parm, &data);
3665	  else
3666	    assign_parm_setup_stack (&all, parm, &data);
3667	}
3668
3669      if (cfun->stdarg && !DECL_CHAIN (parm))
3670	{
3671	  int pretend_bytes = 0;
3672
3673	  assign_parms_setup_varargs (&all, &data, false);
3674
3675	  if (chkp_function_instrumented_p (fndecl))
3676	    {
3677	      /* We expect this is the last parm.  Otherwise it is wrong
3678		 to assign bounds right now.  */
3679	      gcc_assert (i == (fnargs.length () - 1));
3680	      assign_bounds (bndargs, all, true, false, false);
3681	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3682							  data.promoted_mode,
3683							  data.passed_type,
3684							  &pretend_bytes,
3685							  false);
3686	      assign_bounds (bndargs, all, false, true, true);
3687	      bndargs.release ();
3688	    }
3689	}
3690
3691      /* Update info on where next arg arrives in registers.  */
3692      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3693					  data.passed_type, data.named_arg);
3694
3695      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3696	bound_no++;
3697    }
3698
3699  assign_bounds (bndargs, all, true, true, true);
3700  bndargs.release ();
3701
3702  if (targetm.calls.split_complex_arg)
3703    assign_parms_unsplit_complex (&all, fnargs);
3704
3705  fnargs.release ();
3706
3707  /* Output all parameter conversion instructions (possibly including calls)
3708     now that all parameters have been copied out of hard registers.  */
3709  emit_insn (all.first_conversion_insn);
3710
3711  /* Estimate reload stack alignment from scalar return mode.  */
3712  if (SUPPORTS_STACK_ALIGNMENT)
3713    {
3714      if (DECL_RESULT (fndecl))
3715	{
3716	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
3717	  machine_mode mode = TYPE_MODE (type);
3718
3719	  if (mode != BLKmode
3720	      && mode != VOIDmode
3721	      && !AGGREGATE_TYPE_P (type))
3722	    {
3723	      unsigned int align = GET_MODE_ALIGNMENT (mode);
3724	      if (crtl->stack_alignment_estimated < align)
3725		{
3726		  gcc_assert (!crtl->stack_realign_processed);
3727		  crtl->stack_alignment_estimated = align;
3728		}
3729	    }
3730	}
3731    }
3732
3733  /* If we are receiving a struct value address as the first argument, set up
3734     the RTL for the function result. As this might require code to convert
3735     the transmitted address to Pmode, we do this here to ensure that possible
3736     preliminary conversions of the address have been emitted already.  */
3737  if (all.function_result_decl)
3738    {
3739      tree result = DECL_RESULT (current_function_decl);
3740      rtx addr = DECL_RTL (all.function_result_decl);
3741      rtx x;
3742
3743      if (DECL_BY_REFERENCE (result))
3744	{
3745	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3746	  x = addr;
3747	}
3748      else
3749	{
3750	  SET_DECL_VALUE_EXPR (result,
3751			       build1 (INDIRECT_REF, TREE_TYPE (result),
3752				       all.function_result_decl));
3753	  addr = convert_memory_address (Pmode, addr);
3754	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3755	  set_mem_attributes (x, result, 1);
3756	}
3757
3758      DECL_HAS_VALUE_EXPR_P (result) = 1;
3759
3760      SET_DECL_RTL (result, x);
3761    }
3762
3763  /* We have aligned all the args, so add space for the pretend args.  */
3764  crtl->args.pretend_args_size = all.pretend_args_size;
3765  all.stack_args_size.constant += all.extra_pretend_bytes;
3766  crtl->args.size = all.stack_args_size.constant;
3767
3768  /* Adjust function incoming argument size for alignment and
3769     minimum length.  */
3770
3771  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3772  crtl->args.size = CEIL_ROUND (crtl->args.size,
3773					   PARM_BOUNDARY / BITS_PER_UNIT);
3774
3775#ifdef ARGS_GROW_DOWNWARD
3776  crtl->args.arg_offset_rtx
3777    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3778       : expand_expr (size_diffop (all.stack_args_size.var,
3779				   size_int (-all.stack_args_size.constant)),
3780		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
3781#else
3782  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3783#endif
3784
3785  /* See how many bytes, if any, of its args a function should try to pop
3786     on return.  */
3787
3788  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3789							 TREE_TYPE (fndecl),
3790							 crtl->args.size);
3791
3792  /* For stdarg.h function, save info about
3793     regs and stack space used by the named args.  */
3794
3795  crtl->args.info = all.args_so_far_v;
3796
3797  /* Set the rtx used for the function return value.  Put this in its
3798     own variable so any optimizers that need this information don't have
3799     to include tree.h.  Do this here so it gets done when an inlined
3800     function gets output.  */
3801
3802  crtl->return_rtx
3803    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3804       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3805
3806  /* If scalar return value was computed in a pseudo-reg, or was a named
3807     return value that got dumped to the stack, copy that to the hard
3808     return register.  */
3809  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3810    {
3811      tree decl_result = DECL_RESULT (fndecl);
3812      rtx decl_rtl = DECL_RTL (decl_result);
3813
3814      if (REG_P (decl_rtl)
3815	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3816	  : DECL_REGISTER (decl_result))
3817	{
3818	  rtx real_decl_rtl;
3819
3820	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3821							fndecl, true);
3822	  if (chkp_function_instrumented_p (fndecl))
3823	    crtl->return_bnd
3824	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3825							  fndecl, true);
3826	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3827	  /* The delay slot scheduler assumes that crtl->return_rtx
3828	     holds the hard register containing the return value, not a
3829	     temporary pseudo.  */
3830	  crtl->return_rtx = real_decl_rtl;
3831	}
3832    }
3833}
3834
3835/* A subroutine of gimplify_parameters, invoked via walk_tree.
3836   For all seen types, gimplify their sizes.  */
3837
3838static tree
3839gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3840{
3841  tree t = *tp;
3842
3843  *walk_subtrees = 0;
3844  if (TYPE_P (t))
3845    {
3846      if (POINTER_TYPE_P (t))
3847	*walk_subtrees = 1;
3848      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3849	       && !TYPE_SIZES_GIMPLIFIED (t))
3850	{
3851	  gimplify_type_sizes (t, (gimple_seq *) data);
3852	  *walk_subtrees = 1;
3853	}
3854    }
3855
3856  return NULL;
3857}
3858
3859/* Gimplify the parameter list for current_function_decl.  This involves
3860   evaluating SAVE_EXPRs of variable sized parameters and generating code
3861   to implement callee-copies reference parameters.  Returns a sequence of
3862   statements to add to the beginning of the function.  */
3863
3864gimple_seq
3865gimplify_parameters (void)
3866{
3867  struct assign_parm_data_all all;
3868  tree parm;
3869  gimple_seq stmts = NULL;
3870  vec<tree> fnargs;
3871  unsigned i;
3872
3873  assign_parms_initialize_all (&all);
3874  fnargs = assign_parms_augmented_arg_list (&all);
3875
3876  FOR_EACH_VEC_ELT (fnargs, i, parm)
3877    {
3878      struct assign_parm_data_one data;
3879
3880      /* Extract the type of PARM; adjust it according to ABI.  */
3881      assign_parm_find_data_types (&all, parm, &data);
3882
3883      /* Early out for errors and void parameters.  */
3884      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3885	continue;
3886
3887      /* Update info on where next arg arrives in registers.  */
3888      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3889					  data.passed_type, data.named_arg);
3890
3891      /* ??? Once upon a time variable_size stuffed parameter list
3892	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
3893	 turned out to be less than manageable in the gimple world.
3894	 Now we have to hunt them down ourselves.  */
3895      walk_tree_without_duplicates (&data.passed_type,
3896				    gimplify_parm_type, &stmts);
3897
3898      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3899	{
3900	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3901	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3902	}
3903
3904      if (data.passed_pointer)
3905	{
3906          tree type = TREE_TYPE (data.passed_type);
3907	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3908				       type, data.named_arg))
3909	    {
3910	      tree local, t;
3911
3912	      /* For constant-sized objects, this is trivial; for
3913		 variable-sized objects, we have to play games.  */
3914	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3915		  && !(flag_stack_check == GENERIC_STACK_CHECK
3916		       && compare_tree_int (DECL_SIZE_UNIT (parm),
3917					    STACK_CHECK_MAX_VAR_SIZE) > 0))
3918		{
3919		  local = create_tmp_var (type, get_name (parm));
3920		  DECL_IGNORED_P (local) = 0;
3921		  /* If PARM was addressable, move that flag over
3922		     to the local copy, as its address will be taken,
3923		     not the PARMs.  Keep the parms address taken
3924		     as we'll query that flag during gimplification.  */
3925		  if (TREE_ADDRESSABLE (parm))
3926		    TREE_ADDRESSABLE (local) = 1;
3927		  else if (TREE_CODE (type) == COMPLEX_TYPE
3928			   || TREE_CODE (type) == VECTOR_TYPE)
3929		    DECL_GIMPLE_REG_P (local) = 1;
3930		}
3931	      else
3932		{
3933		  tree ptr_type, addr;
3934
3935		  ptr_type = build_pointer_type (type);
3936		  addr = create_tmp_reg (ptr_type, get_name (parm));
3937		  DECL_IGNORED_P (addr) = 0;
3938		  local = build_fold_indirect_ref (addr);
3939
3940		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3941		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3942				       size_int (DECL_ALIGN (parm)));
3943
3944		  /* The call has been built for a variable-sized object.  */
3945		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
3946		  t = fold_convert (ptr_type, t);
3947		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3948		  gimplify_and_add (t, &stmts);
3949		}
3950
3951	      gimplify_assign (local, parm, &stmts);
3952
3953	      SET_DECL_VALUE_EXPR (parm, local);
3954	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
3955	    }
3956	}
3957    }
3958
3959  fnargs.release ();
3960
3961  return stmts;
3962}
3963
3964/* Compute the size and offset from the start of the stacked arguments for a
3965   parm passed in mode PASSED_MODE and with type TYPE.
3966
3967   INITIAL_OFFSET_PTR points to the current offset into the stacked
3968   arguments.
3969
3970   The starting offset and size for this parm are returned in
3971   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
3972   nonzero, the offset is that of stack slot, which is returned in
3973   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
3974   padding required from the initial offset ptr to the stack slot.
3975
3976   IN_REGS is nonzero if the argument will be passed in registers.  It will
3977   never be set if REG_PARM_STACK_SPACE is not defined.
3978
3979   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3980   for arguments which are passed in registers.
3981
3982   FNDECL is the function in which the argument was defined.
3983
3984   There are two types of rounding that are done.  The first, controlled by
3985   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3986   argument list to be aligned to the specific boundary (in bits).  This
3987   rounding affects the initial and starting offsets, but not the argument
3988   size.
3989
3990   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3991   optionally rounds the size of the parm to PARM_BOUNDARY.  The
3992   initial offset is not affected by this rounding, while the size always
3993   is and the starting offset may be.  */
3994
3995/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3996    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3997    callers pass in the total size of args so far as
3998    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
3999
4000void
4001locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4002		     int reg_parm_stack_space, int partial,
4003		     tree fndecl ATTRIBUTE_UNUSED,
4004		     struct args_size *initial_offset_ptr,
4005		     struct locate_and_pad_arg_data *locate)
4006{
4007  tree sizetree;
4008  enum direction where_pad;
4009  unsigned int boundary, round_boundary;
4010  int part_size_in_regs;
4011
4012  /* If we have found a stack parm before we reach the end of the
4013     area reserved for registers, skip that area.  */
4014  if (! in_regs)
4015    {
4016      if (reg_parm_stack_space > 0)
4017	{
4018	  if (initial_offset_ptr->var)
4019	    {
4020	      initial_offset_ptr->var
4021		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4022			      ssize_int (reg_parm_stack_space));
4023	      initial_offset_ptr->constant = 0;
4024	    }
4025	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
4026	    initial_offset_ptr->constant = reg_parm_stack_space;
4027	}
4028    }
4029
4030  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4031
4032  sizetree
4033    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4034  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4035  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4036  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4037							      type);
4038  locate->where_pad = where_pad;
4039
4040  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
4041  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4042    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4043
4044  locate->boundary = boundary;
4045
4046  if (SUPPORTS_STACK_ALIGNMENT)
4047    {
4048      /* stack_alignment_estimated can't change after stack has been
4049	 realigned.  */
4050      if (crtl->stack_alignment_estimated < boundary)
4051        {
4052          if (!crtl->stack_realign_processed)
4053	    crtl->stack_alignment_estimated = boundary;
4054	  else
4055	    {
4056	      /* If stack is realigned and stack alignment value
4057		 hasn't been finalized, it is OK not to increase
4058		 stack_alignment_estimated.  The bigger alignment
4059		 requirement is recorded in stack_alignment_needed
4060		 below.  */
4061	      gcc_assert (!crtl->stack_realign_finalized
4062			  && crtl->stack_realign_needed);
4063	    }
4064	}
4065    }
4066
4067  /* Remember if the outgoing parameter requires extra alignment on the
4068     calling function side.  */
4069  if (crtl->stack_alignment_needed < boundary)
4070    crtl->stack_alignment_needed = boundary;
4071  if (crtl->preferred_stack_boundary < boundary)
4072    crtl->preferred_stack_boundary = boundary;
4073
4074#ifdef ARGS_GROW_DOWNWARD
4075  locate->slot_offset.constant = -initial_offset_ptr->constant;
4076  if (initial_offset_ptr->var)
4077    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4078					  initial_offset_ptr->var);
4079
4080  {
4081    tree s2 = sizetree;
4082    if (where_pad != none
4083	&& (!tree_fits_uhwi_p (sizetree)
4084	    || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4085      s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4086    SUB_PARM_SIZE (locate->slot_offset, s2);
4087  }
4088
4089  locate->slot_offset.constant += part_size_in_regs;
4090
4091  if (!in_regs || reg_parm_stack_space > 0)
4092    pad_to_arg_alignment (&locate->slot_offset, boundary,
4093			  &locate->alignment_pad);
4094
4095  locate->size.constant = (-initial_offset_ptr->constant
4096			   - locate->slot_offset.constant);
4097  if (initial_offset_ptr->var)
4098    locate->size.var = size_binop (MINUS_EXPR,
4099				   size_binop (MINUS_EXPR,
4100					       ssize_int (0),
4101					       initial_offset_ptr->var),
4102				   locate->slot_offset.var);
4103
4104  /* Pad_below needs the pre-rounded size to know how much to pad
4105     below.  */
4106  locate->offset = locate->slot_offset;
4107  if (where_pad == downward)
4108    pad_below (&locate->offset, passed_mode, sizetree);
4109
4110#else /* !ARGS_GROW_DOWNWARD */
4111  if (!in_regs || reg_parm_stack_space > 0)
4112    pad_to_arg_alignment (initial_offset_ptr, boundary,
4113			  &locate->alignment_pad);
4114  locate->slot_offset = *initial_offset_ptr;
4115
4116#ifdef PUSH_ROUNDING
4117  if (passed_mode != BLKmode)
4118    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4119#endif
4120
4121  /* Pad_below needs the pre-rounded size to know how much to pad below
4122     so this must be done before rounding up.  */
4123  locate->offset = locate->slot_offset;
4124  if (where_pad == downward)
4125    pad_below (&locate->offset, passed_mode, sizetree);
4126
4127  if (where_pad != none
4128      && (!tree_fits_uhwi_p (sizetree)
4129	  || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4130    sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4131
4132  ADD_PARM_SIZE (locate->size, sizetree);
4133
4134  locate->size.constant -= part_size_in_regs;
4135#endif /* ARGS_GROW_DOWNWARD */
4136
4137#ifdef FUNCTION_ARG_OFFSET
4138  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4139#endif
4140}
4141
4142/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4143   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
4144
4145static void
4146pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4147		      struct args_size *alignment_pad)
4148{
4149  tree save_var = NULL_TREE;
4150  HOST_WIDE_INT save_constant = 0;
4151  int boundary_in_bytes = boundary / BITS_PER_UNIT;
4152  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4153
4154#ifdef SPARC_STACK_BOUNDARY_HACK
4155  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4156     the real alignment of %sp.  However, when it does this, the
4157     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
4158  if (SPARC_STACK_BOUNDARY_HACK)
4159    sp_offset = 0;
4160#endif
4161
4162  if (boundary > PARM_BOUNDARY)
4163    {
4164      save_var = offset_ptr->var;
4165      save_constant = offset_ptr->constant;
4166    }
4167
4168  alignment_pad->var = NULL_TREE;
4169  alignment_pad->constant = 0;
4170
4171  if (boundary > BITS_PER_UNIT)
4172    {
4173      if (offset_ptr->var)
4174	{
4175	  tree sp_offset_tree = ssize_int (sp_offset);
4176	  tree offset = size_binop (PLUS_EXPR,
4177				    ARGS_SIZE_TREE (*offset_ptr),
4178				    sp_offset_tree);
4179#ifdef ARGS_GROW_DOWNWARD
4180	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
4181#else
4182	  tree rounded = round_up   (offset, boundary / BITS_PER_UNIT);
4183#endif
4184
4185	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4186	  /* ARGS_SIZE_TREE includes constant term.  */
4187	  offset_ptr->constant = 0;
4188	  if (boundary > PARM_BOUNDARY)
4189	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4190					     save_var);
4191	}
4192      else
4193	{
4194	  offset_ptr->constant = -sp_offset +
4195#ifdef ARGS_GROW_DOWNWARD
4196	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
4197#else
4198	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
4199#endif
4200	    if (boundary > PARM_BOUNDARY)
4201	      alignment_pad->constant = offset_ptr->constant - save_constant;
4202	}
4203    }
4204}
4205
4206static void
4207pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4208{
4209  if (passed_mode != BLKmode)
4210    {
4211      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4212	offset_ptr->constant
4213	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4214	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4215	      - GET_MODE_SIZE (passed_mode));
4216    }
4217  else
4218    {
4219      if (TREE_CODE (sizetree) != INTEGER_CST
4220	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4221	{
4222	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
4223	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4224	  /* Add it in.  */
4225	  ADD_PARM_SIZE (*offset_ptr, s2);
4226	  SUB_PARM_SIZE (*offset_ptr, sizetree);
4227	}
4228    }
4229}
4230
4231
4232/* True if register REGNO was alive at a place where `setjmp' was
4233   called and was set more than once or is an argument.  Such regs may
4234   be clobbered by `longjmp'.  */
4235
4236static bool
4237regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4238{
4239  /* There appear to be cases where some local vars never reach the
4240     backend but have bogus regnos.  */
4241  if (regno >= max_reg_num ())
4242    return false;
4243
4244  return ((REG_N_SETS (regno) > 1
4245	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4246			       regno))
4247	  && REGNO_REG_SET_P (setjmp_crosses, regno));
4248}
4249
4250/* Walk the tree of blocks describing the binding levels within a
4251   function and warn about variables the might be killed by setjmp or
4252   vfork.  This is done after calling flow_analysis before register
4253   allocation since that will clobber the pseudo-regs to hard
4254   regs.  */
4255
4256static void
4257setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4258{
4259  tree decl, sub;
4260
4261  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4262    {
4263      if (TREE_CODE (decl) == VAR_DECL
4264	  && DECL_RTL_SET_P (decl)
4265	  && REG_P (DECL_RTL (decl))
4266	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4267	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4268                 " %<longjmp%> or %<vfork%>", decl);
4269    }
4270
4271  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4272    setjmp_vars_warning (setjmp_crosses, sub);
4273}
4274
4275/* Do the appropriate part of setjmp_vars_warning
4276   but for arguments instead of local variables.  */
4277
4278static void
4279setjmp_args_warning (bitmap setjmp_crosses)
4280{
4281  tree decl;
4282  for (decl = DECL_ARGUMENTS (current_function_decl);
4283       decl; decl = DECL_CHAIN (decl))
4284    if (DECL_RTL (decl) != 0
4285	&& REG_P (DECL_RTL (decl))
4286	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4287      warning (OPT_Wclobbered,
4288               "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4289	       decl);
4290}
4291
4292/* Generate warning messages for variables live across setjmp.  */
4293
4294void
4295generate_setjmp_warnings (void)
4296{
4297  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4298
4299  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4300      || bitmap_empty_p (setjmp_crosses))
4301    return;
4302
4303  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4304  setjmp_args_warning (setjmp_crosses);
4305}
4306
4307
4308/* Reverse the order of elements in the fragment chain T of blocks,
4309   and return the new head of the chain (old last element).
4310   In addition to that clear BLOCK_SAME_RANGE flags when needed
4311   and adjust BLOCK_SUPERCONTEXT from the super fragment to
4312   its super fragment origin.  */
4313
4314static tree
4315block_fragments_nreverse (tree t)
4316{
4317  tree prev = 0, block, next, prev_super = 0;
4318  tree super = BLOCK_SUPERCONTEXT (t);
4319  if (BLOCK_FRAGMENT_ORIGIN (super))
4320    super = BLOCK_FRAGMENT_ORIGIN (super);
4321  for (block = t; block; block = next)
4322    {
4323      next = BLOCK_FRAGMENT_CHAIN (block);
4324      BLOCK_FRAGMENT_CHAIN (block) = prev;
4325      if ((prev && !BLOCK_SAME_RANGE (prev))
4326	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4327	      != prev_super))
4328	BLOCK_SAME_RANGE (block) = 0;
4329      prev_super = BLOCK_SUPERCONTEXT (block);
4330      BLOCK_SUPERCONTEXT (block) = super;
4331      prev = block;
4332    }
4333  t = BLOCK_FRAGMENT_ORIGIN (t);
4334  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4335      != prev_super)
4336    BLOCK_SAME_RANGE (t) = 0;
4337  BLOCK_SUPERCONTEXT (t) = super;
4338  return prev;
4339}
4340
4341/* Reverse the order of elements in the chain T of blocks,
4342   and return the new head of the chain (old last element).
4343   Also do the same on subblocks and reverse the order of elements
4344   in BLOCK_FRAGMENT_CHAIN as well.  */
4345
4346static tree
4347blocks_nreverse_all (tree t)
4348{
4349  tree prev = 0, block, next;
4350  for (block = t; block; block = next)
4351    {
4352      next = BLOCK_CHAIN (block);
4353      BLOCK_CHAIN (block) = prev;
4354      if (BLOCK_FRAGMENT_CHAIN (block)
4355	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4356	{
4357	  BLOCK_FRAGMENT_CHAIN (block)
4358	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4359	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4360	    BLOCK_SAME_RANGE (block) = 0;
4361	}
4362      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4363      prev = block;
4364    }
4365  return prev;
4366}
4367
4368
4369/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4370   and create duplicate blocks.  */
4371/* ??? Need an option to either create block fragments or to create
4372   abstract origin duplicates of a source block.  It really depends
4373   on what optimization has been performed.  */
4374
4375void
4376reorder_blocks (void)
4377{
4378  tree block = DECL_INITIAL (current_function_decl);
4379
4380  if (block == NULL_TREE)
4381    return;
4382
4383  auto_vec<tree, 10> block_stack;
4384
4385  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
4386  clear_block_marks (block);
4387
4388  /* Prune the old trees away, so that they don't get in the way.  */
4389  BLOCK_SUBBLOCKS (block) = NULL_TREE;
4390  BLOCK_CHAIN (block) = NULL_TREE;
4391
4392  /* Recreate the block tree from the note nesting.  */
4393  reorder_blocks_1 (get_insns (), block, &block_stack);
4394  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4395}
4396
4397/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
4398
4399void
4400clear_block_marks (tree block)
4401{
4402  while (block)
4403    {
4404      TREE_ASM_WRITTEN (block) = 0;
4405      clear_block_marks (BLOCK_SUBBLOCKS (block));
4406      block = BLOCK_CHAIN (block);
4407    }
4408}
4409
4410static void
4411reorder_blocks_1 (rtx_insn *insns, tree current_block,
4412		  vec<tree> *p_block_stack)
4413{
4414  rtx_insn *insn;
4415  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4416
4417  for (insn = insns; insn; insn = NEXT_INSN (insn))
4418    {
4419      if (NOTE_P (insn))
4420	{
4421	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4422	    {
4423	      tree block = NOTE_BLOCK (insn);
4424	      tree origin;
4425
4426	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4427	      origin = block;
4428
4429	      if (prev_end)
4430		BLOCK_SAME_RANGE (prev_end) = 0;
4431	      prev_end = NULL_TREE;
4432
4433	      /* If we have seen this block before, that means it now
4434		 spans multiple address regions.  Create a new fragment.  */
4435	      if (TREE_ASM_WRITTEN (block))
4436		{
4437		  tree new_block = copy_node (block);
4438
4439		  BLOCK_SAME_RANGE (new_block) = 0;
4440		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4441		  BLOCK_FRAGMENT_CHAIN (new_block)
4442		    = BLOCK_FRAGMENT_CHAIN (origin);
4443		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4444
4445		  NOTE_BLOCK (insn) = new_block;
4446		  block = new_block;
4447		}
4448
4449	      if (prev_beg == current_block && prev_beg)
4450		BLOCK_SAME_RANGE (block) = 1;
4451
4452	      prev_beg = origin;
4453
4454	      BLOCK_SUBBLOCKS (block) = 0;
4455	      TREE_ASM_WRITTEN (block) = 1;
4456	      /* When there's only one block for the entire function,
4457		 current_block == block and we mustn't do this, it
4458		 will cause infinite recursion.  */
4459	      if (block != current_block)
4460		{
4461		  tree super;
4462		  if (block != origin)
4463		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4464				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4465								      (origin))
4466				   == current_block);
4467		  if (p_block_stack->is_empty ())
4468		    super = current_block;
4469		  else
4470		    {
4471		      super = p_block_stack->last ();
4472		      gcc_assert (super == current_block
4473				  || BLOCK_FRAGMENT_ORIGIN (super)
4474				     == current_block);
4475		    }
4476		  BLOCK_SUPERCONTEXT (block) = super;
4477		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4478		  BLOCK_SUBBLOCKS (current_block) = block;
4479		  current_block = origin;
4480		}
4481	      p_block_stack->safe_push (block);
4482	    }
4483	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4484	    {
4485	      NOTE_BLOCK (insn) = p_block_stack->pop ();
4486	      current_block = BLOCK_SUPERCONTEXT (current_block);
4487	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
4488		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4489	      prev_beg = NULL_TREE;
4490	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4491			 ? NOTE_BLOCK (insn) : NULL_TREE;
4492	    }
4493	}
4494      else
4495	{
4496	  prev_beg = NULL_TREE;
4497	  if (prev_end)
4498	    BLOCK_SAME_RANGE (prev_end) = 0;
4499	  prev_end = NULL_TREE;
4500	}
4501    }
4502}
4503
4504/* Reverse the order of elements in the chain T of blocks,
4505   and return the new head of the chain (old last element).  */
4506
4507tree
4508blocks_nreverse (tree t)
4509{
4510  tree prev = 0, block, next;
4511  for (block = t; block; block = next)
4512    {
4513      next = BLOCK_CHAIN (block);
4514      BLOCK_CHAIN (block) = prev;
4515      prev = block;
4516    }
4517  return prev;
4518}
4519
4520/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4521   by modifying the last node in chain 1 to point to chain 2.  */
4522
4523tree
4524block_chainon (tree op1, tree op2)
4525{
4526  tree t1;
4527
4528  if (!op1)
4529    return op2;
4530  if (!op2)
4531    return op1;
4532
4533  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4534    continue;
4535  BLOCK_CHAIN (t1) = op2;
4536
4537#ifdef ENABLE_TREE_CHECKING
4538  {
4539    tree t2;
4540    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4541      gcc_assert (t2 != t1);
4542  }
4543#endif
4544
4545  return op1;
4546}
4547
4548/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
4549   non-NULL, list them all into VECTOR, in a depth-first preorder
4550   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
4551   blocks.  */
4552
4553static int
4554all_blocks (tree block, tree *vector)
4555{
4556  int n_blocks = 0;
4557
4558  while (block)
4559    {
4560      TREE_ASM_WRITTEN (block) = 0;
4561
4562      /* Record this block.  */
4563      if (vector)
4564	vector[n_blocks] = block;
4565
4566      ++n_blocks;
4567
4568      /* Record the subblocks, and their subblocks...  */
4569      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4570			      vector ? vector + n_blocks : 0);
4571      block = BLOCK_CHAIN (block);
4572    }
4573
4574  return n_blocks;
4575}
4576
4577/* Return a vector containing all the blocks rooted at BLOCK.  The
4578   number of elements in the vector is stored in N_BLOCKS_P.  The
4579   vector is dynamically allocated; it is the caller's responsibility
4580   to call `free' on the pointer returned.  */
4581
4582static tree *
4583get_block_vector (tree block, int *n_blocks_p)
4584{
4585  tree *block_vector;
4586
4587  *n_blocks_p = all_blocks (block, NULL);
4588  block_vector = XNEWVEC (tree, *n_blocks_p);
4589  all_blocks (block, block_vector);
4590
4591  return block_vector;
4592}
4593
4594static GTY(()) int next_block_index = 2;
4595
4596/* Set BLOCK_NUMBER for all the blocks in FN.  */
4597
4598void
4599number_blocks (tree fn)
4600{
4601  int i;
4602  int n_blocks;
4603  tree *block_vector;
4604
4605  /* For SDB and XCOFF debugging output, we start numbering the blocks
4606     from 1 within each function, rather than keeping a running
4607     count.  */
4608#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4609  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4610    next_block_index = 1;
4611#endif
4612
4613  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4614
4615  /* The top-level BLOCK isn't numbered at all.  */
4616  for (i = 1; i < n_blocks; ++i)
4617    /* We number the blocks from two.  */
4618    BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4619
4620  free (block_vector);
4621
4622  return;
4623}
4624
4625/* If VAR is present in a subblock of BLOCK, return the subblock.  */
4626
4627DEBUG_FUNCTION tree
4628debug_find_var_in_block_tree (tree var, tree block)
4629{
4630  tree t;
4631
4632  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4633    if (t == var)
4634      return block;
4635
4636  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4637    {
4638      tree ret = debug_find_var_in_block_tree (var, t);
4639      if (ret)
4640	return ret;
4641    }
4642
4643  return NULL_TREE;
4644}
4645
4646/* Keep track of whether we're in a dummy function context.  If we are,
4647   we don't want to invoke the set_current_function hook, because we'll
4648   get into trouble if the hook calls target_reinit () recursively or
4649   when the initial initialization is not yet complete.  */
4650
4651static bool in_dummy_function;
4652
4653/* Invoke the target hook when setting cfun.  Update the optimization options
4654   if the function uses different options than the default.  */
4655
4656static void
4657invoke_set_current_function_hook (tree fndecl)
4658{
4659  if (!in_dummy_function)
4660    {
4661      tree opts = ((fndecl)
4662		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4663		   : optimization_default_node);
4664
4665      if (!opts)
4666	opts = optimization_default_node;
4667
4668      /* Change optimization options if needed.  */
4669      if (optimization_current_node != opts)
4670	{
4671	  optimization_current_node = opts;
4672	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4673	}
4674
4675      targetm.set_current_function (fndecl);
4676      this_fn_optabs = this_target_optabs;
4677
4678      if (opts != optimization_default_node)
4679	{
4680	  init_tree_optimization_optabs (opts);
4681	  if (TREE_OPTIMIZATION_OPTABS (opts))
4682	    this_fn_optabs = (struct target_optabs *)
4683	      TREE_OPTIMIZATION_OPTABS (opts);
4684	}
4685    }
4686}
4687
4688/* cfun should never be set directly; use this function.  */
4689
4690void
4691set_cfun (struct function *new_cfun)
4692{
4693  if (cfun != new_cfun)
4694    {
4695      cfun = new_cfun;
4696      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4697    }
4698}
4699
4700/* Initialized with NOGC, making this poisonous to the garbage collector.  */
4701
4702static vec<function_p> cfun_stack;
4703
4704/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
4705   current_function_decl accordingly.  */
4706
4707void
4708push_cfun (struct function *new_cfun)
4709{
4710  gcc_assert ((!cfun && !current_function_decl)
4711	      || (cfun && current_function_decl == cfun->decl));
4712  cfun_stack.safe_push (cfun);
4713  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4714  set_cfun (new_cfun);
4715}
4716
4717/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
4718
4719void
4720pop_cfun (void)
4721{
4722  struct function *new_cfun = cfun_stack.pop ();
4723  /* When in_dummy_function, we do have a cfun but current_function_decl is
4724     NULL.  We also allow pushing NULL cfun and subsequently changing
4725     current_function_decl to something else and have both restored by
4726     pop_cfun.  */
4727  gcc_checking_assert (in_dummy_function
4728		       || !cfun
4729		       || current_function_decl == cfun->decl);
4730  set_cfun (new_cfun);
4731  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4732}
4733
4734/* Return value of funcdef and increase it.  */
4735int
4736get_next_funcdef_no (void)
4737{
4738  return funcdef_no++;
4739}
4740
4741/* Return value of funcdef.  */
4742int
4743get_last_funcdef_no (void)
4744{
4745  return funcdef_no;
4746}
4747
4748/* Allocate a function structure for FNDECL and set its contents
4749   to the defaults.  Set cfun to the newly-allocated object.
4750   Some of the helper functions invoked during initialization assume
4751   that cfun has already been set.  Therefore, assign the new object
4752   directly into cfun and invoke the back end hook explicitly at the
4753   very end, rather than initializing a temporary and calling set_cfun
4754   on it.
4755
4756   ABSTRACT_P is true if this is a function that will never be seen by
4757   the middle-end.  Such functions are front-end concepts (like C++
4758   function templates) that do not correspond directly to functions
4759   placed in object files.  */
4760
4761void
4762allocate_struct_function (tree fndecl, bool abstract_p)
4763{
4764  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4765
4766  cfun = ggc_cleared_alloc<function> ();
4767
4768  init_eh_for_function ();
4769
4770  if (init_machine_status)
4771    cfun->machine = (*init_machine_status) ();
4772
4773#ifdef OVERRIDE_ABI_FORMAT
4774  OVERRIDE_ABI_FORMAT (fndecl);
4775#endif
4776
4777  if (fndecl != NULL_TREE)
4778    {
4779      DECL_STRUCT_FUNCTION (fndecl) = cfun;
4780      cfun->decl = fndecl;
4781      current_function_funcdef_no = get_next_funcdef_no ();
4782    }
4783
4784  invoke_set_current_function_hook (fndecl);
4785
4786  if (fndecl != NULL_TREE)
4787    {
4788      tree result = DECL_RESULT (fndecl);
4789      if (!abstract_p && aggregate_value_p (result, fndecl))
4790	{
4791#ifdef PCC_STATIC_STRUCT_RETURN
4792	  cfun->returns_pcc_struct = 1;
4793#endif
4794	  cfun->returns_struct = 1;
4795	}
4796
4797      cfun->stdarg = stdarg_p (fntype);
4798
4799      /* Assume all registers in stdarg functions need to be saved.  */
4800      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4801      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4802
4803      /* ??? This could be set on a per-function basis by the front-end
4804         but is this worth the hassle?  */
4805      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4806      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4807
4808      if (!profile_flag && !flag_instrument_function_entry_exit)
4809	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4810    }
4811}
4812
4813/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4814   instead of just setting it.  */
4815
4816void
4817push_struct_function (tree fndecl)
4818{
4819  /* When in_dummy_function we might be in the middle of a pop_cfun and
4820     current_function_decl and cfun may not match.  */
4821  gcc_assert (in_dummy_function
4822	      || (!cfun && !current_function_decl)
4823	      || (cfun && current_function_decl == cfun->decl));
4824  cfun_stack.safe_push (cfun);
4825  current_function_decl = fndecl;
4826  allocate_struct_function (fndecl, false);
4827}
4828
4829/* Reset crtl and other non-struct-function variables to defaults as
4830   appropriate for emitting rtl at the start of a function.  */
4831
4832static void
4833prepare_function_start (void)
4834{
4835  gcc_assert (!crtl->emit.x_last_insn);
4836  init_temp_slots ();
4837  init_emit ();
4838  init_varasm_status ();
4839  init_expr ();
4840  default_rtl_profile ();
4841
4842  if (flag_stack_usage_info)
4843    {
4844      cfun->su = ggc_cleared_alloc<stack_usage> ();
4845      cfun->su->static_stack_size = -1;
4846    }
4847
4848  cse_not_expected = ! optimize;
4849
4850  /* Caller save not needed yet.  */
4851  caller_save_needed = 0;
4852
4853  /* We haven't done register allocation yet.  */
4854  reg_renumber = 0;
4855
4856  /* Indicate that we have not instantiated virtual registers yet.  */
4857  virtuals_instantiated = 0;
4858
4859  /* Indicate that we want CONCATs now.  */
4860  generating_concat_p = 1;
4861
4862  /* Indicate we have no need of a frame pointer yet.  */
4863  frame_pointer_needed = 0;
4864}
4865
4866/* Initialize the rtl expansion mechanism so that we can do simple things
4867   like generate sequences.  This is used to provide a context during global
4868   initialization of some passes.  You must call expand_dummy_function_end
4869   to exit this context.  */
4870
4871void
4872init_dummy_function_start (void)
4873{
4874  gcc_assert (!in_dummy_function);
4875  in_dummy_function = true;
4876  push_struct_function (NULL_TREE);
4877  prepare_function_start ();
4878}
4879
4880/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4881   and initialize static variables for generating RTL for the statements
4882   of the function.  */
4883
4884void
4885init_function_start (tree subr)
4886{
4887  if (subr && DECL_STRUCT_FUNCTION (subr))
4888    set_cfun (DECL_STRUCT_FUNCTION (subr));
4889  else
4890    allocate_struct_function (subr, false);
4891
4892  /* Initialize backend, if needed.  */
4893  initialize_rtl ();
4894
4895  prepare_function_start ();
4896  decide_function_section (subr);
4897
4898  /* Warn if this value is an aggregate type,
4899     regardless of which calling convention we are using for it.  */
4900  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4901    warning (OPT_Waggregate_return, "function returns an aggregate");
4902}
4903
4904/* Expand code to verify the stack_protect_guard.  This is invoked at
4905   the end of a function to be protected.  */
4906
4907#ifndef HAVE_stack_protect_test
4908# define HAVE_stack_protect_test		0
4909# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
4910#endif
4911
4912void
4913stack_protect_epilogue (void)
4914{
4915  tree guard_decl = targetm.stack_protect_guard ();
4916  rtx_code_label *label = gen_label_rtx ();
4917  rtx x, y, tmp;
4918
4919  x = expand_normal (crtl->stack_protect_guard);
4920  y = expand_normal (guard_decl);
4921
4922  /* Allow the target to compare Y with X without leaking either into
4923     a register.  */
4924  switch ((int) (HAVE_stack_protect_test != 0))
4925    {
4926    case 1:
4927      tmp = gen_stack_protect_test (x, y, label);
4928      if (tmp)
4929	{
4930	  emit_insn (tmp);
4931	  break;
4932	}
4933      /* FALLTHRU */
4934
4935    default:
4936      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4937      break;
4938    }
4939
4940  /* The noreturn predictor has been moved to the tree level.  The rtl-level
4941     predictors estimate this branch about 20%, which isn't enough to get
4942     things moved out of line.  Since this is the only extant case of adding
4943     a noreturn function at the rtl level, it doesn't seem worth doing ought
4944     except adding the prediction by hand.  */
4945  tmp = get_last_insn ();
4946  if (JUMP_P (tmp))
4947    predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
4948
4949  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4950  free_temp_slots ();
4951  emit_label (label);
4952}
4953
4954/* Start the RTL for a new function, and set variables used for
4955   emitting RTL.
4956   SUBR is the FUNCTION_DECL node.
4957   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4958   the function's parameters, which must be run at any return statement.  */
4959
4960void
4961expand_function_start (tree subr)
4962{
4963  /* Make sure volatile mem refs aren't considered
4964     valid operands of arithmetic insns.  */
4965  init_recog_no_volatile ();
4966
4967  crtl->profile
4968    = (profile_flag
4969       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4970
4971  crtl->limit_stack
4972    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4973
4974  /* Make the label for return statements to jump to.  Do not special
4975     case machines with special return instructions -- they will be
4976     handled later during jump, ifcvt, or epilogue creation.  */
4977  return_label = gen_label_rtx ();
4978
4979  /* Initialize rtx used to return the value.  */
4980  /* Do this before assign_parms so that we copy the struct value address
4981     before any library calls that assign parms might generate.  */
4982
4983  /* Decide whether to return the value in memory or in a register.  */
4984  if (aggregate_value_p (DECL_RESULT (subr), subr))
4985    {
4986      /* Returning something that won't go in a register.  */
4987      rtx value_address = 0;
4988
4989#ifdef PCC_STATIC_STRUCT_RETURN
4990      if (cfun->returns_pcc_struct)
4991	{
4992	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4993	  value_address = assemble_static_space (size);
4994	}
4995      else
4996#endif
4997	{
4998	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4999	  /* Expect to be passed the address of a place to store the value.
5000	     If it is passed as an argument, assign_parms will take care of
5001	     it.  */
5002	  if (sv)
5003	    {
5004	      value_address = gen_reg_rtx (Pmode);
5005	      emit_move_insn (value_address, sv);
5006	    }
5007	}
5008      if (value_address)
5009	{
5010	  rtx x = value_address;
5011	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
5012	    {
5013	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
5014	      set_mem_attributes (x, DECL_RESULT (subr), 1);
5015	    }
5016	  SET_DECL_RTL (DECL_RESULT (subr), x);
5017	}
5018    }
5019  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5020    /* If return mode is void, this decl rtl should not be used.  */
5021    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
5022  else
5023    {
5024      /* Compute the return values into a pseudo reg, which we will copy
5025	 into the true return register after the cleanups are done.  */
5026      tree return_type = TREE_TYPE (DECL_RESULT (subr));
5027      if (TYPE_MODE (return_type) != BLKmode
5028	  && targetm.calls.return_in_msb (return_type))
5029	/* expand_function_end will insert the appropriate padding in
5030	   this case.  Use the return value's natural (unpadded) mode
5031	   within the function proper.  */
5032	SET_DECL_RTL (DECL_RESULT (subr),
5033		      gen_reg_rtx (TYPE_MODE (return_type)));
5034      else
5035	{
5036	  /* In order to figure out what mode to use for the pseudo, we
5037	     figure out what the mode of the eventual return register will
5038	     actually be, and use that.  */
5039	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5040
5041	  /* Structures that are returned in registers are not
5042	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
5043	  if (REG_P (hard_reg))
5044	    SET_DECL_RTL (DECL_RESULT (subr),
5045			  gen_reg_rtx (GET_MODE (hard_reg)));
5046	  else
5047	    {
5048	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5049	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
5050	    }
5051	}
5052
5053      /* Set DECL_REGISTER flag so that expand_function_end will copy the
5054	 result to the real return register(s).  */
5055      DECL_REGISTER (DECL_RESULT (subr)) = 1;
5056
5057      if (chkp_function_instrumented_p (current_function_decl))
5058	{
5059	  tree return_type = TREE_TYPE (DECL_RESULT (subr));
5060	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5061								 subr, 1);
5062	  SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
5063	}
5064    }
5065
5066  /* Initialize rtx for parameters and local variables.
5067     In some cases this requires emitting insns.  */
5068  assign_parms (subr);
5069
5070  /* If function gets a static chain arg, store it.  */
5071  if (cfun->static_chain_decl)
5072    {
5073      tree parm = cfun->static_chain_decl;
5074      rtx local, chain, insn;
5075
5076      local = gen_reg_rtx (Pmode);
5077      chain = targetm.calls.static_chain (current_function_decl, true);
5078
5079      set_decl_incoming_rtl (parm, chain, false);
5080      SET_DECL_RTL (parm, local);
5081      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5082
5083      insn = emit_move_insn (local, chain);
5084
5085      /* Mark the register as eliminable, similar to parameters.  */
5086      if (MEM_P (chain)
5087	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5088	set_dst_reg_note (insn, REG_EQUIV, chain, local);
5089
5090      /* If we aren't optimizing, save the static chain onto the stack.  */
5091      if (!optimize)
5092	{
5093	  tree saved_static_chain_decl
5094	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5095			  DECL_NAME (parm), TREE_TYPE (parm));
5096	  rtx saved_static_chain_rtx
5097	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5098	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5099	  emit_move_insn (saved_static_chain_rtx, chain);
5100	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5101	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
5102	}
5103    }
5104
5105  /* If the function receives a non-local goto, then store the
5106     bits we need to restore the frame pointer.  */
5107  if (cfun->nonlocal_goto_save_area)
5108    {
5109      tree t_save;
5110      rtx r_save;
5111
5112      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5113      gcc_assert (DECL_RTL_SET_P (var));
5114
5115      t_save = build4 (ARRAY_REF,
5116		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5117		       cfun->nonlocal_goto_save_area,
5118		       integer_zero_node, NULL_TREE, NULL_TREE);
5119      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5120      gcc_assert (GET_MODE (r_save) == Pmode);
5121
5122      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5123      update_nonlocal_goto_save_area ();
5124    }
5125
5126  /* The following was moved from init_function_start.
5127     The move is supposed to make sdb output more accurate.  */
5128  /* Indicate the beginning of the function body,
5129     as opposed to parm setup.  */
5130  emit_note (NOTE_INSN_FUNCTION_BEG);
5131
5132  gcc_assert (NOTE_P (get_last_insn ()));
5133
5134  parm_birth_insn = get_last_insn ();
5135
5136  if (crtl->profile)
5137    {
5138#ifdef PROFILE_HOOK
5139      PROFILE_HOOK (current_function_funcdef_no);
5140#endif
5141    }
5142
5143  /* If we are doing generic stack checking, the probe should go here.  */
5144  if (flag_stack_check == GENERIC_STACK_CHECK)
5145    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5146}
5147
5148/* Undo the effects of init_dummy_function_start.  */
5149void
5150expand_dummy_function_end (void)
5151{
5152  gcc_assert (in_dummy_function);
5153
5154  /* End any sequences that failed to be closed due to syntax errors.  */
5155  while (in_sequence_p ())
5156    end_sequence ();
5157
5158  /* Outside function body, can't compute type's actual size
5159     until next function's body starts.  */
5160
5161  free_after_parsing (cfun);
5162  free_after_compilation (cfun);
5163  pop_cfun ();
5164  in_dummy_function = false;
5165}
5166
5167/* Helper for diddle_return_value.  */
5168
5169void
5170diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5171{
5172  if (! outgoing)
5173    return;
5174
5175  if (REG_P (outgoing))
5176    (*doit) (outgoing, arg);
5177  else if (GET_CODE (outgoing) == PARALLEL)
5178    {
5179      int i;
5180
5181      for (i = 0; i < XVECLEN (outgoing, 0); i++)
5182	{
5183	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5184
5185	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5186	    (*doit) (x, arg);
5187	}
5188    }
5189}
5190
5191/* Call DOIT for each hard register used as a return value from
5192   the current function.  */
5193
5194void
5195diddle_return_value (void (*doit) (rtx, void *), void *arg)
5196{
5197  diddle_return_value_1 (doit, arg, crtl->return_bnd);
5198  diddle_return_value_1 (doit, arg, crtl->return_rtx);
5199}
5200
5201static void
5202do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5203{
5204  emit_clobber (reg);
5205}
5206
5207void
5208clobber_return_register (void)
5209{
5210  diddle_return_value (do_clobber_return_reg, NULL);
5211
5212  /* In case we do use pseudo to return value, clobber it too.  */
5213  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5214    {
5215      tree decl_result = DECL_RESULT (current_function_decl);
5216      rtx decl_rtl = DECL_RTL (decl_result);
5217      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5218	{
5219	  do_clobber_return_reg (decl_rtl, NULL);
5220	}
5221    }
5222}
5223
5224static void
5225do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5226{
5227  emit_use (reg);
5228}
5229
5230static void
5231use_return_register (void)
5232{
5233  diddle_return_value (do_use_return_reg, NULL);
5234}
5235
5236/* Possibly warn about unused parameters.  */
5237void
5238do_warn_unused_parameter (tree fn)
5239{
5240  tree decl;
5241
5242  for (decl = DECL_ARGUMENTS (fn);
5243       decl; decl = DECL_CHAIN (decl))
5244    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5245	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
5246	&& !TREE_NO_WARNING (decl))
5247      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
5248}
5249
5250/* Set the location of the insn chain starting at INSN to LOC.  */
5251
5252static void
5253set_insn_locations (rtx_insn *insn, int loc)
5254{
5255  while (insn != NULL)
5256    {
5257      if (INSN_P (insn))
5258	INSN_LOCATION (insn) = loc;
5259      insn = NEXT_INSN (insn);
5260    }
5261}
5262
5263/* Generate RTL for the end of the current function.  */
5264
5265void
5266expand_function_end (void)
5267{
5268  rtx clobber_after;
5269
5270  /* If arg_pointer_save_area was referenced only from a nested
5271     function, we will not have initialized it yet.  Do that now.  */
5272  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5273    get_arg_pointer_save_area ();
5274
5275  /* If we are doing generic stack checking and this function makes calls,
5276     do a stack probe at the start of the function to ensure we have enough
5277     space for another stack frame.  */
5278  if (flag_stack_check == GENERIC_STACK_CHECK)
5279    {
5280      rtx_insn *insn, *seq;
5281
5282      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5283	if (CALL_P (insn))
5284	  {
5285	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5286	    start_sequence ();
5287	    if (STACK_CHECK_MOVING_SP)
5288	      anti_adjust_stack_and_probe (max_frame_size, true);
5289	    else
5290	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5291	    seq = get_insns ();
5292	    end_sequence ();
5293	    set_insn_locations (seq, prologue_location);
5294	    emit_insn_before (seq, stack_check_probe_note);
5295	    break;
5296	  }
5297    }
5298
5299  /* End any sequences that failed to be closed due to syntax errors.  */
5300  while (in_sequence_p ())
5301    end_sequence ();
5302
5303  clear_pending_stack_adjust ();
5304  do_pending_stack_adjust ();
5305
5306  /* Output a linenumber for the end of the function.
5307     SDB depends on this.  */
5308  set_curr_insn_location (input_location);
5309
5310  /* Before the return label (if any), clobber the return
5311     registers so that they are not propagated live to the rest of
5312     the function.  This can only happen with functions that drop
5313     through; if there had been a return statement, there would
5314     have either been a return rtx, or a jump to the return label.
5315
5316     We delay actual code generation after the current_function_value_rtx
5317     is computed.  */
5318  clobber_after = get_last_insn ();
5319
5320  /* Output the label for the actual return from the function.  */
5321  emit_label (return_label);
5322
5323  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5324    {
5325      /* Let except.c know where it should emit the call to unregister
5326	 the function context for sjlj exceptions.  */
5327      if (flag_exceptions)
5328	sjlj_emit_function_exit_after (get_last_insn ());
5329    }
5330  else
5331    {
5332      /* We want to ensure that instructions that may trap are not
5333	 moved into the epilogue by scheduling, because we don't
5334	 always emit unwind information for the epilogue.  */
5335      if (cfun->can_throw_non_call_exceptions)
5336	emit_insn (gen_blockage ());
5337    }
5338
5339  /* If this is an implementation of throw, do what's necessary to
5340     communicate between __builtin_eh_return and the epilogue.  */
5341  expand_eh_return ();
5342
5343  /* If scalar return value was computed in a pseudo-reg, or was a named
5344     return value that got dumped to the stack, copy that to the hard
5345     return register.  */
5346  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5347    {
5348      tree decl_result = DECL_RESULT (current_function_decl);
5349      rtx decl_rtl = DECL_RTL (decl_result);
5350
5351      if (REG_P (decl_rtl)
5352	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5353	  : DECL_REGISTER (decl_result))
5354	{
5355	  rtx real_decl_rtl = crtl->return_rtx;
5356
5357	  /* This should be set in assign_parms.  */
5358	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5359
5360	  /* If this is a BLKmode structure being returned in registers,
5361	     then use the mode computed in expand_return.  Note that if
5362	     decl_rtl is memory, then its mode may have been changed,
5363	     but that crtl->return_rtx has not.  */
5364	  if (GET_MODE (real_decl_rtl) == BLKmode)
5365	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5366
5367	  /* If a non-BLKmode return value should be padded at the least
5368	     significant end of the register, shift it left by the appropriate
5369	     amount.  BLKmode results are handled using the group load/store
5370	     machinery.  */
5371	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5372	      && REG_P (real_decl_rtl)
5373	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5374	    {
5375	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5376					   REGNO (real_decl_rtl)),
5377			      decl_rtl);
5378	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5379	    }
5380	  /* If a named return value dumped decl_return to memory, then
5381	     we may need to re-do the PROMOTE_MODE signed/unsigned
5382	     extension.  */
5383	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5384	    {
5385	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5386	      promote_function_mode (TREE_TYPE (decl_result),
5387				     GET_MODE (decl_rtl), &unsignedp,
5388				     TREE_TYPE (current_function_decl), 1);
5389
5390	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
5391	    }
5392	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
5393	    {
5394	      /* If expand_function_start has created a PARALLEL for decl_rtl,
5395		 move the result to the real return registers.  Otherwise, do
5396		 a group load from decl_rtl for a named return.  */
5397	      if (GET_CODE (decl_rtl) == PARALLEL)
5398		emit_group_move (real_decl_rtl, decl_rtl);
5399	      else
5400		emit_group_load (real_decl_rtl, decl_rtl,
5401				 TREE_TYPE (decl_result),
5402				 int_size_in_bytes (TREE_TYPE (decl_result)));
5403	    }
5404	  /* In the case of complex integer modes smaller than a word, we'll
5405	     need to generate some non-trivial bitfield insertions.  Do that
5406	     on a pseudo and not the hard register.  */
5407	  else if (GET_CODE (decl_rtl) == CONCAT
5408		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5409		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5410	    {
5411	      int old_generating_concat_p;
5412	      rtx tmp;
5413
5414	      old_generating_concat_p = generating_concat_p;
5415	      generating_concat_p = 0;
5416	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5417	      generating_concat_p = old_generating_concat_p;
5418
5419	      emit_move_insn (tmp, decl_rtl);
5420	      emit_move_insn (real_decl_rtl, tmp);
5421	    }
5422	  else
5423	    emit_move_insn (real_decl_rtl, decl_rtl);
5424	}
5425    }
5426
5427  /* If returning a structure, arrange to return the address of the value
5428     in a place where debuggers expect to find it.
5429
5430     If returning a structure PCC style,
5431     the caller also depends on this value.
5432     And cfun->returns_pcc_struct is not necessarily set.  */
5433  if ((cfun->returns_struct || cfun->returns_pcc_struct)
5434      && !targetm.calls.omit_struct_return_reg)
5435    {
5436      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5437      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5438      rtx outgoing;
5439
5440      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5441	type = TREE_TYPE (type);
5442      else
5443	value_address = XEXP (value_address, 0);
5444
5445      outgoing = targetm.calls.function_value (build_pointer_type (type),
5446					       current_function_decl, true);
5447
5448      /* Mark this as a function return value so integrate will delete the
5449	 assignment and USE below when inlining this function.  */
5450      REG_FUNCTION_VALUE_P (outgoing) = 1;
5451
5452      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
5453      value_address = convert_memory_address (GET_MODE (outgoing),
5454					      value_address);
5455
5456      emit_move_insn (outgoing, value_address);
5457
5458      /* Show return register used to hold result (in this case the address
5459	 of the result.  */
5460      crtl->return_rtx = outgoing;
5461    }
5462
5463  /* Emit the actual code to clobber return register.  Don't emit
5464     it if clobber_after is a barrier, then the previous basic block
5465     certainly doesn't fall thru into the exit block.  */
5466  if (!BARRIER_P (clobber_after))
5467    {
5468      rtx seq;
5469
5470      start_sequence ();
5471      clobber_return_register ();
5472      seq = get_insns ();
5473      end_sequence ();
5474
5475      emit_insn_after (seq, clobber_after);
5476    }
5477
5478  /* Output the label for the naked return from the function.  */
5479  if (naked_return_label)
5480    emit_label (naked_return_label);
5481
5482  /* @@@ This is a kludge.  We want to ensure that instructions that
5483     may trap are not moved into the epilogue by scheduling, because
5484     we don't always emit unwind information for the epilogue.  */
5485  if (cfun->can_throw_non_call_exceptions
5486      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5487    emit_insn (gen_blockage ());
5488
5489  /* If stack protection is enabled for this function, check the guard.  */
5490  if (crtl->stack_protect_guard)
5491    stack_protect_epilogue ();
5492
5493  /* If we had calls to alloca, and this machine needs
5494     an accurate stack pointer to exit the function,
5495     insert some code to save and restore the stack pointer.  */
5496  if (! EXIT_IGNORE_STACK
5497      && cfun->calls_alloca)
5498    {
5499      rtx tem = 0, seq;
5500
5501      start_sequence ();
5502      emit_stack_save (SAVE_FUNCTION, &tem);
5503      seq = get_insns ();
5504      end_sequence ();
5505      emit_insn_before (seq, parm_birth_insn);
5506
5507      emit_stack_restore (SAVE_FUNCTION, tem);
5508    }
5509
5510  /* ??? This should no longer be necessary since stupid is no longer with
5511     us, but there are some parts of the compiler (eg reload_combine, and
5512     sh mach_dep_reorg) that still try and compute their own lifetime info
5513     instead of using the general framework.  */
5514  use_return_register ();
5515}
5516
5517rtx
5518get_arg_pointer_save_area (void)
5519{
5520  rtx ret = arg_pointer_save_area;
5521
5522  if (! ret)
5523    {
5524      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5525      arg_pointer_save_area = ret;
5526    }
5527
5528  if (! crtl->arg_pointer_save_area_init)
5529    {
5530      rtx seq;
5531
5532      /* Save the arg pointer at the beginning of the function.  The
5533	 generated stack slot may not be a valid memory address, so we
5534	 have to check it and fix it if necessary.  */
5535      start_sequence ();
5536      emit_move_insn (validize_mem (copy_rtx (ret)),
5537                      crtl->args.internal_arg_pointer);
5538      seq = get_insns ();
5539      end_sequence ();
5540
5541      push_topmost_sequence ();
5542      emit_insn_after (seq, entry_of_function ());
5543      pop_topmost_sequence ();
5544
5545      crtl->arg_pointer_save_area_init = true;
5546    }
5547
5548  return ret;
5549}
5550
5551/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5552   for the first time.  */
5553
5554static void
5555record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5556{
5557  rtx_insn *tmp;
5558  hash_table<insn_cache_hasher> *hash = *hashp;
5559
5560  if (hash == NULL)
5561    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5562
5563  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5564    {
5565      rtx *slot = hash->find_slot (tmp, INSERT);
5566      gcc_assert (*slot == NULL);
5567      *slot = tmp;
5568    }
5569}
5570
5571/* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5572   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
5573   insn, then record COPY as well.  */
5574
5575void
5576maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5577{
5578  hash_table<insn_cache_hasher> *hash;
5579  rtx *slot;
5580
5581  hash = epilogue_insn_hash;
5582  if (!hash || !hash->find (insn))
5583    {
5584      hash = prologue_insn_hash;
5585      if (!hash || !hash->find (insn))
5586	return;
5587    }
5588
5589  slot = hash->find_slot (copy, INSERT);
5590  gcc_assert (*slot == NULL);
5591  *slot = copy;
5592}
5593
5594/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
5595   we can be running after reorg, SEQUENCE rtl is possible.  */
5596
5597static bool
5598contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
5599{
5600  if (hash == NULL)
5601    return false;
5602
5603  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5604    {
5605      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5606      int i;
5607      for (i = seq->len () - 1; i >= 0; i--)
5608	if (hash->find (seq->element (i)))
5609	  return true;
5610      return false;
5611    }
5612
5613  return hash->find (const_cast<rtx> (insn)) != NULL;
5614}
5615
5616int
5617prologue_epilogue_contains (const_rtx insn)
5618{
5619  if (contains (insn, prologue_insn_hash))
5620    return 1;
5621  if (contains (insn, epilogue_insn_hash))
5622    return 1;
5623  return 0;
5624}
5625
5626#ifdef HAVE_return
5627/* Insert use of return register before the end of BB.  */
5628
5629static void
5630emit_use_return_register_into_block (basic_block bb)
5631{
5632  rtx seq, insn;
5633  start_sequence ();
5634  use_return_register ();
5635  seq = get_insns ();
5636  end_sequence ();
5637  insn = BB_END (bb);
5638#ifdef HAVE_cc0
5639  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5640    insn = prev_cc0_setter (insn);
5641#endif
5642  emit_insn_before (seq, insn);
5643}
5644
5645
5646/* Create a return pattern, either simple_return or return, depending on
5647   simple_p.  */
5648
5649static rtx
5650gen_return_pattern (bool simple_p)
5651{
5652#ifdef HAVE_simple_return
5653  return simple_p ? gen_simple_return () : gen_return ();
5654#else
5655  gcc_assert (!simple_p);
5656  return gen_return ();
5657#endif
5658}
5659
5660/* Insert an appropriate return pattern at the end of block BB.  This
5661   also means updating block_for_insn appropriately.  SIMPLE_P is
5662   the same as in gen_return_pattern and passed to it.  */
5663
5664void
5665emit_return_into_block (bool simple_p, basic_block bb)
5666{
5667  rtx jump, pat;
5668  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5669  pat = PATTERN (jump);
5670  if (GET_CODE (pat) == PARALLEL)
5671    pat = XVECEXP (pat, 0, 0);
5672  gcc_assert (ANY_RETURN_P (pat));
5673  JUMP_LABEL (jump) = pat;
5674}
5675#endif
5676
5677/* Set JUMP_LABEL for a return insn.  */
5678
5679void
5680set_return_jump_label (rtx returnjump)
5681{
5682  rtx pat = PATTERN (returnjump);
5683  if (GET_CODE (pat) == PARALLEL)
5684    pat = XVECEXP (pat, 0, 0);
5685  if (ANY_RETURN_P (pat))
5686    JUMP_LABEL (returnjump) = pat;
5687  else
5688    JUMP_LABEL (returnjump) = ret_rtx;
5689}
5690
5691#if defined (HAVE_return) || defined (HAVE_simple_return)
5692/* Return true if there are any active insns between HEAD and TAIL.  */
5693bool
5694active_insn_between (rtx_insn *head, rtx_insn *tail)
5695{
5696  while (tail)
5697    {
5698      if (active_insn_p (tail))
5699	return true;
5700      if (tail == head)
5701	return false;
5702      tail = PREV_INSN (tail);
5703    }
5704  return false;
5705}
5706
5707/* LAST_BB is a block that exits, and empty of active instructions.
5708   Examine its predecessors for jumps that can be converted to
5709   (conditional) returns.  */
5710vec<edge>
5711convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5712			  vec<edge> unconverted ATTRIBUTE_UNUSED)
5713{
5714  int i;
5715  basic_block bb;
5716  rtx label;
5717  edge_iterator ei;
5718  edge e;
5719  auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5720
5721  FOR_EACH_EDGE (e, ei, last_bb->preds)
5722    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5723      src_bbs.quick_push (e->src);
5724
5725  label = BB_HEAD (last_bb);
5726
5727  FOR_EACH_VEC_ELT (src_bbs, i, bb)
5728    {
5729      rtx_insn *jump = BB_END (bb);
5730
5731      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5732	continue;
5733
5734      e = find_edge (bb, last_bb);
5735
5736      /* If we have an unconditional jump, we can replace that
5737	 with a simple return instruction.  */
5738      if (simplejump_p (jump))
5739	{
5740	  /* The use of the return register might be present in the exit
5741	     fallthru block.  Either:
5742	     - removing the use is safe, and we should remove the use in
5743	     the exit fallthru block, or
5744	     - removing the use is not safe, and we should add it here.
5745	     For now, we conservatively choose the latter.  Either of the
5746	     2 helps in crossjumping.  */
5747	  emit_use_return_register_into_block (bb);
5748
5749	  emit_return_into_block (simple_p, bb);
5750	  delete_insn (jump);
5751	}
5752
5753      /* If we have a conditional jump branching to the last
5754	 block, we can try to replace that with a conditional
5755	 return instruction.  */
5756      else if (condjump_p (jump))
5757	{
5758	  rtx dest;
5759
5760	  if (simple_p)
5761	    dest = simple_return_rtx;
5762	  else
5763	    dest = ret_rtx;
5764	  if (!redirect_jump (jump, dest, 0))
5765	    {
5766#ifdef HAVE_simple_return
5767	      if (simple_p)
5768		{
5769		  if (dump_file)
5770		    fprintf (dump_file,
5771			     "Failed to redirect bb %d branch.\n", bb->index);
5772		  unconverted.safe_push (e);
5773		}
5774#endif
5775	      continue;
5776	    }
5777
5778	  /* See comment in simplejump_p case above.  */
5779	  emit_use_return_register_into_block (bb);
5780
5781	  /* If this block has only one successor, it both jumps
5782	     and falls through to the fallthru block, so we can't
5783	     delete the edge.  */
5784	  if (single_succ_p (bb))
5785	    continue;
5786	}
5787      else
5788	{
5789#ifdef HAVE_simple_return
5790	  if (simple_p)
5791	    {
5792	      if (dump_file)
5793		fprintf (dump_file,
5794			 "Failed to redirect bb %d branch.\n", bb->index);
5795	      unconverted.safe_push (e);
5796	    }
5797#endif
5798	  continue;
5799	}
5800
5801      /* Fix up the CFG for the successful change we just made.  */
5802      redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5803      e->flags &= ~EDGE_CROSSING;
5804    }
5805  src_bbs.release ();
5806  return unconverted;
5807}
5808
5809/* Emit a return insn for the exit fallthru block.  */
5810basic_block
5811emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5812{
5813  basic_block last_bb = exit_fallthru_edge->src;
5814
5815  if (JUMP_P (BB_END (last_bb)))
5816    {
5817      last_bb = split_edge (exit_fallthru_edge);
5818      exit_fallthru_edge = single_succ_edge (last_bb);
5819    }
5820  emit_barrier_after (BB_END (last_bb));
5821  emit_return_into_block (simple_p, last_bb);
5822  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5823  return last_bb;
5824}
5825#endif
5826
5827
5828/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5829   this into place with notes indicating where the prologue ends and where
5830   the epilogue begins.  Update the basic block information when possible.
5831
5832   Notes on epilogue placement:
5833   There are several kinds of edges to the exit block:
5834   * a single fallthru edge from LAST_BB
5835   * possibly, edges from blocks containing sibcalls
5836   * possibly, fake edges from infinite loops
5837
5838   The epilogue is always emitted on the fallthru edge from the last basic
5839   block in the function, LAST_BB, into the exit block.
5840
5841   If LAST_BB is empty except for a label, it is the target of every
5842   other basic block in the function that ends in a return.  If a
5843   target has a return or simple_return pattern (possibly with
5844   conditional variants), these basic blocks can be changed so that a
5845   return insn is emitted into them, and their target is adjusted to
5846   the real exit block.
5847
5848   Notes on shrink wrapping: We implement a fairly conservative
5849   version of shrink-wrapping rather than the textbook one.  We only
5850   generate a single prologue and a single epilogue.  This is
5851   sufficient to catch a number of interesting cases involving early
5852   exits.
5853
5854   First, we identify the blocks that require the prologue to occur before
5855   them.  These are the ones that modify a call-saved register, or reference
5856   any of the stack or frame pointer registers.  To simplify things, we then
5857   mark everything reachable from these blocks as also requiring a prologue.
5858   This takes care of loops automatically, and avoids the need to examine
5859   whether MEMs reference the frame, since it is sufficient to check for
5860   occurrences of the stack or frame pointer.
5861
5862   We then compute the set of blocks for which the need for a prologue
5863   is anticipatable (borrowing terminology from the shrink-wrapping
5864   description in Muchnick's book).  These are the blocks which either
5865   require a prologue themselves, or those that have only successors
5866   where the prologue is anticipatable.  The prologue needs to be
5867   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5868   is not.  For the moment, we ensure that only one such edge exists.
5869
5870   The epilogue is placed as described above, but we make a
5871   distinction between inserting return and simple_return patterns
5872   when modifying other blocks that end in a return.  Blocks that end
5873   in a sibcall omit the sibcall_epilogue if the block is not in
5874   ANTIC.  */
5875
5876void
5877thread_prologue_and_epilogue_insns (void)
5878{
5879  bool inserted;
5880#ifdef HAVE_simple_return
5881  vec<edge> unconverted_simple_returns = vNULL;
5882  bitmap_head bb_flags;
5883#endif
5884  rtx_insn *returnjump;
5885  rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
5886  rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
5887  edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5888  edge_iterator ei;
5889
5890  df_analyze ();
5891
5892  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5893
5894  inserted = false;
5895  epilogue_end = NULL;
5896  returnjump = NULL;
5897
5898  /* Can't deal with multiple successors of the entry block at the
5899     moment.  Function should always have at least one entry
5900     point.  */
5901  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5902  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5903  orig_entry_edge = entry_edge;
5904
5905  split_prologue_seq = NULL;
5906  if (flag_split_stack
5907      && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5908	  == NULL))
5909    {
5910#ifndef HAVE_split_stack_prologue
5911      gcc_unreachable ();
5912#else
5913      gcc_assert (HAVE_split_stack_prologue);
5914
5915      start_sequence ();
5916      emit_insn (gen_split_stack_prologue ());
5917      split_prologue_seq = get_insns ();
5918      end_sequence ();
5919
5920      record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5921      set_insn_locations (split_prologue_seq, prologue_location);
5922#endif
5923    }
5924
5925  prologue_seq = NULL;
5926#ifdef HAVE_prologue
5927  if (HAVE_prologue)
5928    {
5929      start_sequence ();
5930      rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
5931      emit_insn (seq);
5932
5933      /* Insert an explicit USE for the frame pointer
5934         if the profiling is on and the frame pointer is required.  */
5935      if (crtl->profile && frame_pointer_needed)
5936	emit_use (hard_frame_pointer_rtx);
5937
5938      /* Retain a map of the prologue insns.  */
5939      record_insns (seq, NULL, &prologue_insn_hash);
5940      emit_note (NOTE_INSN_PROLOGUE_END);
5941
5942      /* Ensure that instructions are not moved into the prologue when
5943	 profiling is on.  The call to the profiling routine can be
5944	 emitted within the live range of a call-clobbered register.  */
5945      if (!targetm.profile_before_prologue () && crtl->profile)
5946        emit_insn (gen_blockage ());
5947
5948      prologue_seq = get_insns ();
5949      end_sequence ();
5950      set_insn_locations (prologue_seq, prologue_location);
5951    }
5952#endif
5953
5954#ifdef HAVE_simple_return
5955  bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5956
5957  /* Try to perform a kind of shrink-wrapping, making sure the
5958     prologue/epilogue is emitted only around those parts of the
5959     function that require it.  */
5960
5961  try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
5962#endif
5963
5964  if (split_prologue_seq != NULL_RTX)
5965    {
5966      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5967      inserted = true;
5968    }
5969  if (prologue_seq != NULL_RTX)
5970    {
5971      insert_insn_on_edge (prologue_seq, entry_edge);
5972      inserted = true;
5973    }
5974
5975  /* If the exit block has no non-fake predecessors, we don't need
5976     an epilogue.  */
5977  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5978    if ((e->flags & EDGE_FAKE) == 0)
5979      break;
5980  if (e == NULL)
5981    goto epilogue_done;
5982
5983  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5984
5985  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5986
5987#ifdef HAVE_simple_return
5988  if (entry_edge != orig_entry_edge)
5989    exit_fallthru_edge
5990	= get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5991					 &unconverted_simple_returns,
5992					 &returnjump);
5993#endif
5994#ifdef HAVE_return
5995  if (HAVE_return)
5996    {
5997      if (exit_fallthru_edge == NULL)
5998	goto epilogue_done;
5999
6000      if (optimize)
6001	{
6002	  basic_block last_bb = exit_fallthru_edge->src;
6003
6004	  if (LABEL_P (BB_HEAD (last_bb))
6005	      && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6006	    convert_jumps_to_returns (last_bb, false, vNULL);
6007
6008	  if (EDGE_COUNT (last_bb->preds) != 0
6009	      && single_succ_p (last_bb))
6010	    {
6011	      last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6012	      epilogue_end = returnjump = BB_END (last_bb);
6013#ifdef HAVE_simple_return
6014	      /* Emitting the return may add a basic block.
6015		 Fix bb_flags for the added block.  */
6016	      if (last_bb != exit_fallthru_edge->src)
6017		bitmap_set_bit (&bb_flags, last_bb->index);
6018#endif
6019	      goto epilogue_done;
6020	    }
6021	}
6022    }
6023#endif
6024
6025  /* A small fib -- epilogue is not yet completed, but we wish to re-use
6026     this marker for the splits of EH_RETURN patterns, and nothing else
6027     uses the flag in the meantime.  */
6028  epilogue_completed = 1;
6029
6030#ifdef HAVE_eh_return
6031  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
6032     some targets, these get split to a special version of the epilogue
6033     code.  In order to be able to properly annotate these with unwind
6034     info, try to split them now.  If we get a valid split, drop an
6035     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
6036  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6037    {
6038      rtx_insn *prev, *last, *trial;
6039
6040      if (e->flags & EDGE_FALLTHRU)
6041	continue;
6042      last = BB_END (e->src);
6043      if (!eh_returnjump_p (last))
6044	continue;
6045
6046      prev = PREV_INSN (last);
6047      trial = try_split (PATTERN (last), last, 1);
6048      if (trial == last)
6049	continue;
6050
6051      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6052      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6053    }
6054#endif
6055
6056  /* If nothing falls through into the exit block, we don't need an
6057     epilogue.  */
6058
6059  if (exit_fallthru_edge == NULL)
6060    goto epilogue_done;
6061
6062#ifdef HAVE_epilogue
6063  if (HAVE_epilogue)
6064    {
6065      start_sequence ();
6066      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6067      rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
6068      if (seq)
6069	emit_jump_insn (seq);
6070
6071      /* Retain a map of the epilogue insns.  */
6072      record_insns (seq, NULL, &epilogue_insn_hash);
6073      set_insn_locations (seq, epilogue_location);
6074
6075      seq = get_insns ();
6076      returnjump = get_last_insn ();
6077      end_sequence ();
6078
6079      insert_insn_on_edge (seq, exit_fallthru_edge);
6080      inserted = true;
6081
6082      if (JUMP_P (returnjump))
6083	set_return_jump_label (returnjump);
6084    }
6085  else
6086#endif
6087    {
6088      basic_block cur_bb;
6089
6090      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6091	goto epilogue_done;
6092      /* We have a fall-through edge to the exit block, the source is not
6093         at the end of the function, and there will be an assembler epilogue
6094         at the end of the function.
6095         We can't use force_nonfallthru here, because that would try to
6096	 use return.  Inserting a jump 'by hand' is extremely messy, so
6097	 we take advantage of cfg_layout_finalize using
6098	 fixup_fallthru_exit_predecessor.  */
6099      cfg_layout_initialize (0);
6100      FOR_EACH_BB_FN (cur_bb, cfun)
6101	if (cur_bb->index >= NUM_FIXED_BLOCKS
6102	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6103	  cur_bb->aux = cur_bb->next_bb;
6104      cfg_layout_finalize ();
6105    }
6106
6107epilogue_done:
6108
6109  default_rtl_profile ();
6110
6111  if (inserted)
6112    {
6113      sbitmap blocks;
6114
6115      commit_edge_insertions ();
6116
6117      /* Look for basic blocks within the prologue insns.  */
6118      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6119      bitmap_clear (blocks);
6120      bitmap_set_bit (blocks, entry_edge->dest->index);
6121      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6122      find_many_sub_basic_blocks (blocks);
6123      sbitmap_free (blocks);
6124
6125      /* The epilogue insns we inserted may cause the exit edge to no longer
6126	 be fallthru.  */
6127      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6128	{
6129	  if (((e->flags & EDGE_FALLTHRU) != 0)
6130	      && returnjump_p (BB_END (e->src)))
6131	    e->flags &= ~EDGE_FALLTHRU;
6132	}
6133    }
6134
6135#ifdef HAVE_simple_return
6136  convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
6137			    unconverted_simple_returns);
6138#endif
6139
6140#ifdef HAVE_sibcall_epilogue
6141  /* Emit sibling epilogues before any sibling call sites.  */
6142  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6143							     ei_safe_edge (ei));
6144							     )
6145    {
6146      basic_block bb = e->src;
6147      rtx_insn *insn = BB_END (bb);
6148      rtx ep_seq;
6149
6150      if (!CALL_P (insn)
6151	  || ! SIBLING_CALL_P (insn)
6152#ifdef HAVE_simple_return
6153	  || (entry_edge != orig_entry_edge
6154	      && !bitmap_bit_p (&bb_flags, bb->index))
6155#endif
6156	  )
6157	{
6158	  ei_next (&ei);
6159	  continue;
6160	}
6161
6162      ep_seq = gen_sibcall_epilogue ();
6163      if (ep_seq)
6164	{
6165	  start_sequence ();
6166	  emit_note (NOTE_INSN_EPILOGUE_BEG);
6167	  emit_insn (ep_seq);
6168	  rtx_insn *seq = get_insns ();
6169	  end_sequence ();
6170
6171	  /* Retain a map of the epilogue insns.  Used in life analysis to
6172	     avoid getting rid of sibcall epilogue insns.  Do this before we
6173	     actually emit the sequence.  */
6174	  record_insns (seq, NULL, &epilogue_insn_hash);
6175	  set_insn_locations (seq, epilogue_location);
6176
6177	  emit_insn_before (seq, insn);
6178	}
6179      ei_next (&ei);
6180    }
6181#endif
6182
6183#ifdef HAVE_epilogue
6184  if (epilogue_end)
6185    {
6186      rtx_insn *insn, *next;
6187
6188      /* Similarly, move any line notes that appear after the epilogue.
6189         There is no need, however, to be quite so anal about the existence
6190	 of such a note.  Also possibly move
6191	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6192	 info generation.  */
6193      for (insn = epilogue_end; insn; insn = next)
6194	{
6195	  next = NEXT_INSN (insn);
6196	  if (NOTE_P (insn)
6197	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6198	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6199	}
6200    }
6201#endif
6202
6203#ifdef HAVE_simple_return
6204  bitmap_clear (&bb_flags);
6205#endif
6206
6207  /* Threading the prologue and epilogue changes the artificial refs
6208     in the entry and exit blocks.  */
6209  epilogue_completed = 1;
6210  df_update_entry_exit_and_calls ();
6211}
6212
6213/* Reposition the prologue-end and epilogue-begin notes after
6214   instruction scheduling.  */
6215
6216void
6217reposition_prologue_and_epilogue_notes (void)
6218{
6219#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6220    || defined (HAVE_sibcall_epilogue)
6221  /* Since the hash table is created on demand, the fact that it is
6222     non-null is a signal that it is non-empty.  */
6223  if (prologue_insn_hash != NULL)
6224    {
6225      size_t len = prologue_insn_hash->elements ();
6226      rtx_insn *insn, *last = NULL, *note = NULL;
6227
6228      /* Scan from the beginning until we reach the last prologue insn.  */
6229      /* ??? While we do have the CFG intact, there are two problems:
6230	 (1) The prologue can contain loops (typically probing the stack),
6231	     which means that the end of the prologue isn't in the first bb.
6232	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
6233      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6234	{
6235	  if (NOTE_P (insn))
6236	    {
6237	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6238		note = insn;
6239	    }
6240	  else if (contains (insn, prologue_insn_hash))
6241	    {
6242	      last = insn;
6243	      if (--len == 0)
6244		break;
6245	    }
6246	}
6247
6248      if (last)
6249	{
6250	  if (note == NULL)
6251	    {
6252	      /* Scan forward looking for the PROLOGUE_END note.  It should
6253		 be right at the beginning of the block, possibly with other
6254		 insn notes that got moved there.  */
6255	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6256		{
6257		  if (NOTE_P (note)
6258		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6259		    break;
6260		}
6261	    }
6262
6263	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
6264	  if (LABEL_P (last))
6265	    last = NEXT_INSN (last);
6266	  reorder_insns (note, note, last);
6267	}
6268    }
6269
6270  if (epilogue_insn_hash != NULL)
6271    {
6272      edge_iterator ei;
6273      edge e;
6274
6275      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6276	{
6277	  rtx_insn *insn, *first = NULL, *note = NULL;
6278	  basic_block bb = e->src;
6279
6280	  /* Scan from the beginning until we reach the first epilogue insn. */
6281	  FOR_BB_INSNS (bb, insn)
6282	    {
6283	      if (NOTE_P (insn))
6284		{
6285		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6286		    {
6287		      note = insn;
6288		      if (first != NULL)
6289			break;
6290		    }
6291		}
6292	      else if (first == NULL && contains (insn, epilogue_insn_hash))
6293		{
6294		  first = insn;
6295		  if (note != NULL)
6296		    break;
6297		}
6298	    }
6299
6300	  if (note)
6301	    {
6302	      /* If the function has a single basic block, and no real
6303		 epilogue insns (e.g. sibcall with no cleanup), the
6304		 epilogue note can get scheduled before the prologue
6305		 note.  If we have frame related prologue insns, having
6306		 them scanned during the epilogue will result in a crash.
6307		 In this case re-order the epilogue note to just before
6308		 the last insn in the block.  */
6309	      if (first == NULL)
6310		first = BB_END (bb);
6311
6312	      if (PREV_INSN (first) != note)
6313		reorder_insns (note, note, PREV_INSN (first));
6314	    }
6315	}
6316    }
6317#endif /* HAVE_prologue or HAVE_epilogue */
6318}
6319
6320/* Returns the name of function declared by FNDECL.  */
6321const char *
6322fndecl_name (tree fndecl)
6323{
6324  if (fndecl == NULL)
6325    return "(nofn)";
6326  return lang_hooks.decl_printable_name (fndecl, 2);
6327}
6328
6329/* Returns the name of function FN.  */
6330const char *
6331function_name (struct function *fn)
6332{
6333  tree fndecl = (fn == NULL) ? NULL : fn->decl;
6334  return fndecl_name (fndecl);
6335}
6336
6337/* Returns the name of the current function.  */
6338const char *
6339current_function_name (void)
6340{
6341  return function_name (cfun);
6342}
6343
6344
6345static unsigned int
6346rest_of_handle_check_leaf_regs (void)
6347{
6348#ifdef LEAF_REGISTERS
6349  crtl->uses_only_leaf_regs
6350    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6351#endif
6352  return 0;
6353}
6354
6355/* Insert a TYPE into the used types hash table of CFUN.  */
6356
6357static void
6358used_types_insert_helper (tree type, struct function *func)
6359{
6360  if (type != NULL && func != NULL)
6361    {
6362      if (func->used_types_hash == NULL)
6363	func->used_types_hash = hash_set<tree>::create_ggc (37);
6364
6365      func->used_types_hash->add (type);
6366    }
6367}
6368
6369/* Given a type, insert it into the used hash table in cfun.  */
6370void
6371used_types_insert (tree t)
6372{
6373  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6374    if (TYPE_NAME (t))
6375      break;
6376    else
6377      t = TREE_TYPE (t);
6378  if (TREE_CODE (t) == ERROR_MARK)
6379    return;
6380  if (TYPE_NAME (t) == NULL_TREE
6381      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6382    t = TYPE_MAIN_VARIANT (t);
6383  if (debug_info_level > DINFO_LEVEL_NONE)
6384    {
6385      if (cfun)
6386	used_types_insert_helper (t, cfun);
6387      else
6388	{
6389	  /* So this might be a type referenced by a global variable.
6390	     Record that type so that we can later decide to emit its
6391	     debug information.  */
6392	  vec_safe_push (types_used_by_cur_var_decl, t);
6393	}
6394    }
6395}
6396
6397/* Helper to Hash a struct types_used_by_vars_entry.  */
6398
6399static hashval_t
6400hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6401{
6402  gcc_assert (entry && entry->var_decl && entry->type);
6403
6404  return iterative_hash_object (entry->type,
6405				iterative_hash_object (entry->var_decl, 0));
6406}
6407
6408/* Hash function of the types_used_by_vars_entry hash table.  */
6409
6410hashval_t
6411used_type_hasher::hash (types_used_by_vars_entry *entry)
6412{
6413  return hash_types_used_by_vars_entry (entry);
6414}
6415
6416/*Equality function of the types_used_by_vars_entry hash table.  */
6417
6418bool
6419used_type_hasher::equal (types_used_by_vars_entry *e1,
6420			 types_used_by_vars_entry *e2)
6421{
6422  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6423}
6424
6425/* Inserts an entry into the types_used_by_vars_hash hash table. */
6426
6427void
6428types_used_by_var_decl_insert (tree type, tree var_decl)
6429{
6430  if (type != NULL && var_decl != NULL)
6431    {
6432      types_used_by_vars_entry **slot;
6433      struct types_used_by_vars_entry e;
6434      e.var_decl = var_decl;
6435      e.type = type;
6436      if (types_used_by_vars_hash == NULL)
6437	types_used_by_vars_hash
6438	  = hash_table<used_type_hasher>::create_ggc (37);
6439
6440      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6441      if (*slot == NULL)
6442	{
6443	  struct types_used_by_vars_entry *entry;
6444	  entry = ggc_alloc<types_used_by_vars_entry> ();
6445	  entry->type = type;
6446	  entry->var_decl = var_decl;
6447	  *slot = entry;
6448	}
6449    }
6450}
6451
6452namespace {
6453
6454const pass_data pass_data_leaf_regs =
6455{
6456  RTL_PASS, /* type */
6457  "*leaf_regs", /* name */
6458  OPTGROUP_NONE, /* optinfo_flags */
6459  TV_NONE, /* tv_id */
6460  0, /* properties_required */
6461  0, /* properties_provided */
6462  0, /* properties_destroyed */
6463  0, /* todo_flags_start */
6464  0, /* todo_flags_finish */
6465};
6466
6467class pass_leaf_regs : public rtl_opt_pass
6468{
6469public:
6470  pass_leaf_regs (gcc::context *ctxt)
6471    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6472  {}
6473
6474  /* opt_pass methods: */
6475  virtual unsigned int execute (function *)
6476    {
6477      return rest_of_handle_check_leaf_regs ();
6478    }
6479
6480}; // class pass_leaf_regs
6481
6482} // anon namespace
6483
6484rtl_opt_pass *
6485make_pass_leaf_regs (gcc::context *ctxt)
6486{
6487  return new pass_leaf_regs (ctxt);
6488}
6489
6490static unsigned int
6491rest_of_handle_thread_prologue_and_epilogue (void)
6492{
6493  if (optimize)
6494    cleanup_cfg (CLEANUP_EXPENSIVE);
6495
6496  /* On some machines, the prologue and epilogue code, or parts thereof,
6497     can be represented as RTL.  Doing so lets us schedule insns between
6498     it and the rest of the code and also allows delayed branch
6499     scheduling to operate in the epilogue.  */
6500  thread_prologue_and_epilogue_insns ();
6501
6502  /* Shrink-wrapping can result in unreachable edges in the epilogue,
6503     see PR57320.  */
6504  cleanup_cfg (0);
6505
6506  /* The stack usage info is finalized during prologue expansion.  */
6507  if (flag_stack_usage_info)
6508    output_stack_usage ();
6509
6510  return 0;
6511}
6512
6513namespace {
6514
6515const pass_data pass_data_thread_prologue_and_epilogue =
6516{
6517  RTL_PASS, /* type */
6518  "pro_and_epilogue", /* name */
6519  OPTGROUP_NONE, /* optinfo_flags */
6520  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6521  0, /* properties_required */
6522  0, /* properties_provided */
6523  0, /* properties_destroyed */
6524  0, /* todo_flags_start */
6525  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6526};
6527
6528class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6529{
6530public:
6531  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6532    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6533  {}
6534
6535  /* opt_pass methods: */
6536  virtual unsigned int execute (function *)
6537    {
6538      return rest_of_handle_thread_prologue_and_epilogue ();
6539    }
6540
6541}; // class pass_thread_prologue_and_epilogue
6542
6543} // anon namespace
6544
6545rtl_opt_pass *
6546make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6547{
6548  return new pass_thread_prologue_and_epilogue (ctxt);
6549}
6550
6551
6552/* This mini-pass fixes fall-out from SSA in asm statements that have
6553   in-out constraints.  Say you start with
6554
6555     orig = inout;
6556     asm ("": "+mr" (inout));
6557     use (orig);
6558
6559   which is transformed very early to use explicit output and match operands:
6560
6561     orig = inout;
6562     asm ("": "=mr" (inout) : "0" (inout));
6563     use (orig);
6564
6565   Or, after SSA and copyprop,
6566
6567     asm ("": "=mr" (inout_2) : "0" (inout_1));
6568     use (inout_1);
6569
6570   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6571   they represent two separate values, so they will get different pseudo
6572   registers during expansion.  Then, since the two operands need to match
6573   per the constraints, but use different pseudo registers, reload can
6574   only register a reload for these operands.  But reloads can only be
6575   satisfied by hardregs, not by memory, so we need a register for this
6576   reload, just because we are presented with non-matching operands.
6577   So, even though we allow memory for this operand, no memory can be
6578   used for it, just because the two operands don't match.  This can
6579   cause reload failures on register-starved targets.
6580
6581   So it's a symptom of reload not being able to use memory for reloads
6582   or, alternatively it's also a symptom of both operands not coming into
6583   reload as matching (in which case the pseudo could go to memory just
6584   fine, as the alternative allows it, and no reload would be necessary).
6585   We fix the latter problem here, by transforming
6586
6587     asm ("": "=mr" (inout_2) : "0" (inout_1));
6588
6589   back to
6590
6591     inout_2 = inout_1;
6592     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
6593
6594static void
6595match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6596{
6597  int i;
6598  bool changed = false;
6599  rtx op = SET_SRC (p_sets[0]);
6600  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6601  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6602  bool *output_matched = XALLOCAVEC (bool, noutputs);
6603
6604  memset (output_matched, 0, noutputs * sizeof (bool));
6605  for (i = 0; i < ninputs; i++)
6606    {
6607      rtx input, output;
6608      rtx_insn *insns;
6609      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6610      char *end;
6611      int match, j;
6612
6613      if (*constraint == '%')
6614	constraint++;
6615
6616      match = strtoul (constraint, &end, 10);
6617      if (end == constraint)
6618	continue;
6619
6620      gcc_assert (match < noutputs);
6621      output = SET_DEST (p_sets[match]);
6622      input = RTVEC_ELT (inputs, i);
6623      /* Only do the transformation for pseudos.  */
6624      if (! REG_P (output)
6625	  || rtx_equal_p (output, input)
6626	  || (GET_MODE (input) != VOIDmode
6627	      && GET_MODE (input) != GET_MODE (output)))
6628	continue;
6629
6630      /* We can't do anything if the output is also used as input,
6631	 as we're going to overwrite it.  */
6632      for (j = 0; j < ninputs; j++)
6633        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6634	  break;
6635      if (j != ninputs)
6636	continue;
6637
6638      /* Avoid changing the same input several times.  For
6639	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6640	 only change in once (to out1), rather than changing it
6641	 first to out1 and afterwards to out2.  */
6642      if (i > 0)
6643	{
6644	  for (j = 0; j < noutputs; j++)
6645	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
6646	      break;
6647	  if (j != noutputs)
6648	    continue;
6649	}
6650      output_matched[match] = true;
6651
6652      start_sequence ();
6653      emit_move_insn (output, input);
6654      insns = get_insns ();
6655      end_sequence ();
6656      emit_insn_before (insns, insn);
6657
6658      /* Now replace all mentions of the input with output.  We can't
6659	 just replace the occurrence in inputs[i], as the register might
6660	 also be used in some other input (or even in an address of an
6661	 output), which would mean possibly increasing the number of
6662	 inputs by one (namely 'output' in addition), which might pose
6663	 a too complicated problem for reload to solve.  E.g. this situation:
6664
6665	   asm ("" : "=r" (output), "=m" (input) : "0" (input))
6666
6667	 Here 'input' is used in two occurrences as input (once for the
6668	 input operand, once for the address in the second output operand).
6669	 If we would replace only the occurrence of the input operand (to
6670	 make the matching) we would be left with this:
6671
6672	   output = input
6673	   asm ("" : "=r" (output), "=m" (input) : "0" (output))
6674
6675	 Now we suddenly have two different input values (containing the same
6676	 value, but different pseudos) where we formerly had only one.
6677	 With more complicated asms this might lead to reload failures
6678	 which wouldn't have happen without this pass.  So, iterate over
6679	 all operands and replace all occurrences of the register used.  */
6680      for (j = 0; j < noutputs; j++)
6681	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6682	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6683	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6684					      input, output);
6685      for (j = 0; j < ninputs; j++)
6686	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6687	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6688					       input, output);
6689
6690      changed = true;
6691    }
6692
6693  if (changed)
6694    df_insn_rescan (insn);
6695}
6696
6697/* Add the decl D to the local_decls list of FUN.  */
6698
6699void
6700add_local_decl (struct function *fun, tree d)
6701{
6702  gcc_assert (TREE_CODE (d) == VAR_DECL);
6703  vec_safe_push (fun->local_decls, d);
6704}
6705
6706namespace {
6707
6708const pass_data pass_data_match_asm_constraints =
6709{
6710  RTL_PASS, /* type */
6711  "asmcons", /* name */
6712  OPTGROUP_NONE, /* optinfo_flags */
6713  TV_NONE, /* tv_id */
6714  0, /* properties_required */
6715  0, /* properties_provided */
6716  0, /* properties_destroyed */
6717  0, /* todo_flags_start */
6718  0, /* todo_flags_finish */
6719};
6720
6721class pass_match_asm_constraints : public rtl_opt_pass
6722{
6723public:
6724  pass_match_asm_constraints (gcc::context *ctxt)
6725    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6726  {}
6727
6728  /* opt_pass methods: */
6729  virtual unsigned int execute (function *);
6730
6731}; // class pass_match_asm_constraints
6732
6733unsigned
6734pass_match_asm_constraints::execute (function *fun)
6735{
6736  basic_block bb;
6737  rtx_insn *insn;
6738  rtx pat, *p_sets;
6739  int noutputs;
6740
6741  if (!crtl->has_asm_statement)
6742    return 0;
6743
6744  df_set_flags (DF_DEFER_INSN_RESCAN);
6745  FOR_EACH_BB_FN (bb, fun)
6746    {
6747      FOR_BB_INSNS (bb, insn)
6748	{
6749	  if (!INSN_P (insn))
6750	    continue;
6751
6752	  pat = PATTERN (insn);
6753	  if (GET_CODE (pat) == PARALLEL)
6754	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6755	  else if (GET_CODE (pat) == SET)
6756	    p_sets = &PATTERN (insn), noutputs = 1;
6757	  else
6758	    continue;
6759
6760	  if (GET_CODE (*p_sets) == SET
6761	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6762	    match_asm_constraints_1 (insn, p_sets, noutputs);
6763	 }
6764    }
6765
6766  return TODO_df_finish;
6767}
6768
6769} // anon namespace
6770
6771rtl_opt_pass *
6772make_pass_match_asm_constraints (gcc::context *ctxt)
6773{
6774  return new pass_match_asm_constraints (ctxt);
6775}
6776
6777
6778#include "gt-function.h"
6779