1/* Emit RTL for the GCC expander.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4   Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA.  */
22
23
24/* Middle-to-low level generation of rtx code and insns.
25
26   This file contains support functions for creating rtl expressions
27   and manipulating them in the doubly-linked chain of insns.
28
29   The patterns of the insns are created by machine-dependent
30   routines in insn-emit.c, which is generated automatically from
31   the machine description.  These routines make the individual rtx's
32   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33   which are automatically generated from rtl.def; what is machine
34   dependent is the kind of rtx's they make and what arguments they
35   use.  */
36
37#include "config.h"
38#include "system.h"
39#include "coretypes.h"
40#include "tm.h"
41#include "toplev.h"
42#include "rtl.h"
43#include "tree.h"
44#include "tm_p.h"
45#include "flags.h"
46#include "function.h"
47#include "expr.h"
48#include "regs.h"
49#include "hard-reg-set.h"
50#include "hashtab.h"
51#include "insn-config.h"
52#include "recog.h"
53#include "real.h"
54#include "bitmap.h"
55#include "basic-block.h"
56#include "ggc.h"
57#include "debug.h"
58#include "langhooks.h"
59#include "tree-pass.h"
60
61/* Commonly used modes.  */
62
63enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
64enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
65enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
66enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
67
68
69/* This is *not* reset after each function.  It gives each CODE_LABEL
70   in the entire compilation a unique label number.  */
71
72static GTY(()) int label_num = 1;
73
74/* Nonzero means do not generate NOTEs for source line numbers.  */
75
76static int no_line_numbers;
77
78/* Commonly used rtx's, so that we only need space for one copy.
79   These are initialized once for the entire compilation.
80   All of these are unique; no other rtx-object will be equal to any
81   of these.  */
82
83rtx global_rtl[GR_MAX];
84
85/* Commonly used RTL for hard registers.  These objects are not necessarily
86   unique, so we allocate them separately from global_rtl.  They are
87   initialized once per compilation unit, then copied into regno_reg_rtx
88   at the beginning of each function.  */
89static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
90
91/* We record floating-point CONST_DOUBLEs in each floating-point mode for
92   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
93   record a copy of const[012]_rtx.  */
94
95rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
96
97rtx const_true_rtx;
98
99REAL_VALUE_TYPE dconst0;
100REAL_VALUE_TYPE dconst1;
101REAL_VALUE_TYPE dconst2;
102REAL_VALUE_TYPE dconst3;
103REAL_VALUE_TYPE dconst10;
104REAL_VALUE_TYPE dconstm1;
105REAL_VALUE_TYPE dconstm2;
106REAL_VALUE_TYPE dconsthalf;
107REAL_VALUE_TYPE dconstthird;
108REAL_VALUE_TYPE dconstpi;
109REAL_VALUE_TYPE dconste;
110
111/* All references to the following fixed hard registers go through
112   these unique rtl objects.  On machines where the frame-pointer and
113   arg-pointer are the same register, they use the same unique object.
114
115   After register allocation, other rtl objects which used to be pseudo-regs
116   may be clobbered to refer to the frame-pointer register.
117   But references that were originally to the frame-pointer can be
118   distinguished from the others because they contain frame_pointer_rtx.
119
120   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
121   tricky: until register elimination has taken place hard_frame_pointer_rtx
122   should be used if it is being set, and frame_pointer_rtx otherwise.  After
123   register elimination hard_frame_pointer_rtx should always be used.
124   On machines where the two registers are same (most) then these are the
125   same.
126
127   In an inline procedure, the stack and frame pointer rtxs may not be
128   used for anything else.  */
129rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
130rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
131rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
132
133/* This is used to implement __builtin_return_address for some machines.
134   See for instance the MIPS port.  */
135rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
136
137/* We make one copy of (const_int C) where C is in
138   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
139   to save space during the compilation and simplify comparisons of
140   integers.  */
141
142rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
143
144/* A hash table storing CONST_INTs whose absolute value is greater
145   than MAX_SAVED_CONST_INT.  */
146
147static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
148     htab_t const_int_htab;
149
150/* A hash table storing memory attribute structures.  */
151static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
152     htab_t mem_attrs_htab;
153
154/* A hash table storing register attribute structures.  */
155static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
156     htab_t reg_attrs_htab;
157
158/* A hash table storing all CONST_DOUBLEs.  */
159static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
160     htab_t const_double_htab;
161
162#define first_insn (cfun->emit->x_first_insn)
163#define last_insn (cfun->emit->x_last_insn)
164#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
165#define last_location (cfun->emit->x_last_location)
166#define first_label_num (cfun->emit->x_first_label_num)
167
168static rtx make_call_insn_raw (rtx);
169static rtx find_line_note (rtx);
170static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
171static void unshare_all_decls (tree);
172static void reset_used_decls (tree);
173static void mark_label_nuses (rtx);
174static hashval_t const_int_htab_hash (const void *);
175static int const_int_htab_eq (const void *, const void *);
176static hashval_t const_double_htab_hash (const void *);
177static int const_double_htab_eq (const void *, const void *);
178static rtx lookup_const_double (rtx);
179static hashval_t mem_attrs_htab_hash (const void *);
180static int mem_attrs_htab_eq (const void *, const void *);
181static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
182				 enum machine_mode);
183static hashval_t reg_attrs_htab_hash (const void *);
184static int reg_attrs_htab_eq (const void *, const void *);
185static reg_attrs *get_reg_attrs (tree, int);
186static tree component_ref_for_mem_expr (tree);
187static rtx gen_const_vector (enum machine_mode, int);
188static void copy_rtx_if_shared_1 (rtx *orig);
189
190/* Probability of the conditional branch currently proceeded by try_split.
191   Set to -1 otherwise.  */
192int split_branch_probability = -1;
193
194/* Returns a hash code for X (which is a really a CONST_INT).  */
195
196static hashval_t
197const_int_htab_hash (const void *x)
198{
199  return (hashval_t) INTVAL ((rtx) x);
200}
201
202/* Returns nonzero if the value represented by X (which is really a
203   CONST_INT) is the same as that given by Y (which is really a
204   HOST_WIDE_INT *).  */
205
206static int
207const_int_htab_eq (const void *x, const void *y)
208{
209  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
210}
211
212/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
213static hashval_t
214const_double_htab_hash (const void *x)
215{
216  rtx value = (rtx) x;
217  hashval_t h;
218
219  if (GET_MODE (value) == VOIDmode)
220    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
221  else
222    {
223      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
224      /* MODE is used in the comparison, so it should be in the hash.  */
225      h ^= GET_MODE (value);
226    }
227  return h;
228}
229
230/* Returns nonzero if the value represented by X (really a ...)
231   is the same as that represented by Y (really a ...) */
232static int
233const_double_htab_eq (const void *x, const void *y)
234{
235  rtx a = (rtx)x, b = (rtx)y;
236
237  if (GET_MODE (a) != GET_MODE (b))
238    return 0;
239  if (GET_MODE (a) == VOIDmode)
240    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
241	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
242  else
243    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
244			   CONST_DOUBLE_REAL_VALUE (b));
245}
246
247/* Returns a hash code for X (which is a really a mem_attrs *).  */
248
249static hashval_t
250mem_attrs_htab_hash (const void *x)
251{
252  mem_attrs *p = (mem_attrs *) x;
253
254  return (p->alias ^ (p->align * 1000)
255	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
256	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
257	  ^ (size_t) iterative_hash_expr (p->expr, 0));
258}
259
260/* Returns nonzero if the value represented by X (which is really a
261   mem_attrs *) is the same as that given by Y (which is also really a
262   mem_attrs *).  */
263
264static int
265mem_attrs_htab_eq (const void *x, const void *y)
266{
267  mem_attrs *p = (mem_attrs *) x;
268  mem_attrs *q = (mem_attrs *) y;
269
270  return (p->alias == q->alias && p->offset == q->offset
271	  && p->size == q->size && p->align == q->align
272	  && (p->expr == q->expr
273	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
274		  && operand_equal_p (p->expr, q->expr, 0))));
275}
276
277/* Allocate a new mem_attrs structure and insert it into the hash table if
278   one identical to it is not already in the table.  We are doing this for
279   MEM of mode MODE.  */
280
281static mem_attrs *
282get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
283	       unsigned int align, enum machine_mode mode)
284{
285  mem_attrs attrs;
286  void **slot;
287
288  /* If everything is the default, we can just return zero.
289     This must match what the corresponding MEM_* macros return when the
290     field is not present.  */
291  if (alias == 0 && expr == 0 && offset == 0
292      && (size == 0
293	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
294      && (STRICT_ALIGNMENT && mode != BLKmode
295	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
296    return 0;
297
298  attrs.alias = alias;
299  attrs.expr = expr;
300  attrs.offset = offset;
301  attrs.size = size;
302  attrs.align = align;
303
304  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
305  if (*slot == 0)
306    {
307      *slot = ggc_alloc (sizeof (mem_attrs));
308      memcpy (*slot, &attrs, sizeof (mem_attrs));
309    }
310
311  return *slot;
312}
313
314/* Returns a hash code for X (which is a really a reg_attrs *).  */
315
316static hashval_t
317reg_attrs_htab_hash (const void *x)
318{
319  reg_attrs *p = (reg_attrs *) x;
320
321  return ((p->offset * 1000) ^ (long) p->decl);
322}
323
324/* Returns nonzero if the value represented by X (which is really a
325   reg_attrs *) is the same as that given by Y (which is also really a
326   reg_attrs *).  */
327
328static int
329reg_attrs_htab_eq (const void *x, const void *y)
330{
331  reg_attrs *p = (reg_attrs *) x;
332  reg_attrs *q = (reg_attrs *) y;
333
334  return (p->decl == q->decl && p->offset == q->offset);
335}
336/* Allocate a new reg_attrs structure and insert it into the hash table if
337   one identical to it is not already in the table.  We are doing this for
338   MEM of mode MODE.  */
339
340static reg_attrs *
341get_reg_attrs (tree decl, int offset)
342{
343  reg_attrs attrs;
344  void **slot;
345
346  /* If everything is the default, we can just return zero.  */
347  if (decl == 0 && offset == 0)
348    return 0;
349
350  attrs.decl = decl;
351  attrs.offset = offset;
352
353  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
354  if (*slot == 0)
355    {
356      *slot = ggc_alloc (sizeof (reg_attrs));
357      memcpy (*slot, &attrs, sizeof (reg_attrs));
358    }
359
360  return *slot;
361}
362
363/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
364   don't attempt to share with the various global pieces of rtl (such as
365   frame_pointer_rtx).  */
366
367rtx
368gen_raw_REG (enum machine_mode mode, int regno)
369{
370  rtx x = gen_rtx_raw_REG (mode, regno);
371  ORIGINAL_REGNO (x) = regno;
372  return x;
373}
374
375/* There are some RTL codes that require special attention; the generation
376   functions do the raw handling.  If you add to this list, modify
377   special_rtx in gengenrtl.c as well.  */
378
379rtx
380gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
381{
382  void **slot;
383
384  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
385    return const_int_rtx[arg + MAX_SAVED_CONST_INT];
386
387#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
388  if (const_true_rtx && arg == STORE_FLAG_VALUE)
389    return const_true_rtx;
390#endif
391
392  /* Look up the CONST_INT in the hash table.  */
393  slot = htab_find_slot_with_hash (const_int_htab, &arg,
394				   (hashval_t) arg, INSERT);
395  if (*slot == 0)
396    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
397
398  return (rtx) *slot;
399}
400
401rtx
402gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
403{
404  return GEN_INT (trunc_int_for_mode (c, mode));
405}
406
407/* CONST_DOUBLEs might be created from pairs of integers, or from
408   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
409   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
410
411/* Determine whether REAL, a CONST_DOUBLE, already exists in the
412   hash table.  If so, return its counterpart; otherwise add it
413   to the hash table and return it.  */
414static rtx
415lookup_const_double (rtx real)
416{
417  void **slot = htab_find_slot (const_double_htab, real, INSERT);
418  if (*slot == 0)
419    *slot = real;
420
421  return (rtx) *slot;
422}
423
424/* Return a CONST_DOUBLE rtx for a floating-point value specified by
425   VALUE in mode MODE.  */
426rtx
427const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
428{
429  rtx real = rtx_alloc (CONST_DOUBLE);
430  PUT_MODE (real, mode);
431
432  real->u.rv = value;
433
434  return lookup_const_double (real);
435}
436
437/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
438   of ints: I0 is the low-order word and I1 is the high-order word.
439   Do not use this routine for non-integer modes; convert to
440   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */
441
442rtx
443immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
444{
445  rtx value;
446  unsigned int i;
447
448  /* There are the following cases (note that there are no modes with
449     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
450
451     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
452	gen_int_mode.
453     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
454	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
455	from copies of the sign bit, and sign of i0 and i1 are the same),  then
456	we return a CONST_INT for i0.
457     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
458  if (mode != VOIDmode)
459    {
460      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
461		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
462		  /* We can get a 0 for an error mark.  */
463		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
464		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
465
466      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
467	return gen_int_mode (i0, mode);
468
469      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
470    }
471
472  /* If this integer fits in one word, return a CONST_INT.  */
473  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
474    return GEN_INT (i0);
475
476  /* We use VOIDmode for integers.  */
477  value = rtx_alloc (CONST_DOUBLE);
478  PUT_MODE (value, VOIDmode);
479
480  CONST_DOUBLE_LOW (value) = i0;
481  CONST_DOUBLE_HIGH (value) = i1;
482
483  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
484    XWINT (value, i) = 0;
485
486  return lookup_const_double (value);
487}
488
489rtx
490gen_rtx_REG (enum machine_mode mode, unsigned int regno)
491{
492  /* In case the MD file explicitly references the frame pointer, have
493     all such references point to the same frame pointer.  This is
494     used during frame pointer elimination to distinguish the explicit
495     references to these registers from pseudos that happened to be
496     assigned to them.
497
498     If we have eliminated the frame pointer or arg pointer, we will
499     be using it as a normal register, for example as a spill
500     register.  In such cases, we might be accessing it in a mode that
501     is not Pmode and therefore cannot use the pre-allocated rtx.
502
503     Also don't do this when we are making new REGs in reload, since
504     we don't want to get confused with the real pointers.  */
505
506  if (mode == Pmode && !reload_in_progress)
507    {
508      if (regno == FRAME_POINTER_REGNUM
509	  && (!reload_completed || frame_pointer_needed))
510	return frame_pointer_rtx;
511#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
512      if (regno == HARD_FRAME_POINTER_REGNUM
513	  && (!reload_completed || frame_pointer_needed))
514	return hard_frame_pointer_rtx;
515#endif
516#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
517      if (regno == ARG_POINTER_REGNUM)
518	return arg_pointer_rtx;
519#endif
520#ifdef RETURN_ADDRESS_POINTER_REGNUM
521      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
522	return return_address_pointer_rtx;
523#endif
524      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
525	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
526	return pic_offset_table_rtx;
527      if (regno == STACK_POINTER_REGNUM)
528	return stack_pointer_rtx;
529    }
530
531#if 0
532  /* If the per-function register table has been set up, try to re-use
533     an existing entry in that table to avoid useless generation of RTL.
534
535     This code is disabled for now until we can fix the various backends
536     which depend on having non-shared hard registers in some cases.   Long
537     term we want to re-enable this code as it can significantly cut down
538     on the amount of useless RTL that gets generated.
539
540     We'll also need to fix some code that runs after reload that wants to
541     set ORIGINAL_REGNO.  */
542
543  if (cfun
544      && cfun->emit
545      && regno_reg_rtx
546      && regno < FIRST_PSEUDO_REGISTER
547      && reg_raw_mode[regno] == mode)
548    return regno_reg_rtx[regno];
549#endif
550
551  return gen_raw_REG (mode, regno);
552}
553
554rtx
555gen_rtx_MEM (enum machine_mode mode, rtx addr)
556{
557  rtx rt = gen_rtx_raw_MEM (mode, addr);
558
559  /* This field is not cleared by the mere allocation of the rtx, so
560     we clear it here.  */
561  MEM_ATTRS (rt) = 0;
562
563  return rt;
564}
565
566/* Generate a memory referring to non-trapping constant memory.  */
567
568rtx
569gen_const_mem (enum machine_mode mode, rtx addr)
570{
571  rtx mem = gen_rtx_MEM (mode, addr);
572  MEM_READONLY_P (mem) = 1;
573  MEM_NOTRAP_P (mem) = 1;
574  return mem;
575}
576
577/* Generate a MEM referring to fixed portions of the frame, e.g., register
578   save areas.  */
579
580rtx
581gen_frame_mem (enum machine_mode mode, rtx addr)
582{
583  rtx mem = gen_rtx_MEM (mode, addr);
584  MEM_NOTRAP_P (mem) = 1;
585  set_mem_alias_set (mem, get_frame_alias_set ());
586  return mem;
587}
588
589/* Generate a MEM referring to a temporary use of the stack, not part
590    of the fixed stack frame.  For example, something which is pushed
591    by a target splitter.  */
592rtx
593gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
594{
595  rtx mem = gen_rtx_MEM (mode, addr);
596  MEM_NOTRAP_P (mem) = 1;
597  if (!current_function_calls_alloca)
598    set_mem_alias_set (mem, get_frame_alias_set ());
599  return mem;
600}
601
602/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
603   this construct would be valid, and false otherwise.  */
604
605bool
606validate_subreg (enum machine_mode omode, enum machine_mode imode,
607		 rtx reg, unsigned int offset)
608{
609  unsigned int isize = GET_MODE_SIZE (imode);
610  unsigned int osize = GET_MODE_SIZE (omode);
611
612  /* All subregs must be aligned.  */
613  if (offset % osize != 0)
614    return false;
615
616  /* The subreg offset cannot be outside the inner object.  */
617  if (offset >= isize)
618    return false;
619
620  /* ??? This should not be here.  Temporarily continue to allow word_mode
621     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
622     Generally, backends are doing something sketchy but it'll take time to
623     fix them all.  */
624  if (omode == word_mode)
625    ;
626  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
627     is the culprit here, and not the backends.  */
628  else if (osize >= UNITS_PER_WORD && isize >= osize)
629    ;
630  /* Allow component subregs of complex and vector.  Though given the below
631     extraction rules, it's not always clear what that means.  */
632  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
633	   && GET_MODE_INNER (imode) == omode)
634    ;
635  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
636     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
637     represent this.  It's questionable if this ought to be represented at
638     all -- why can't this all be hidden in post-reload splitters that make
639     arbitrarily mode changes to the registers themselves.  */
640  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
641    ;
642  /* Subregs involving floating point modes are not allowed to
643     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
644     (subreg:SI (reg:DF) 0) isn't.  */
645  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
646    {
647      if (isize != osize)
648	return false;
649    }
650
651  /* Paradoxical subregs must have offset zero.  */
652  if (osize > isize)
653    return offset == 0;
654
655  /* This is a normal subreg.  Verify that the offset is representable.  */
656
657  /* For hard registers, we already have most of these rules collected in
658     subreg_offset_representable_p.  */
659  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
660    {
661      unsigned int regno = REGNO (reg);
662
663#ifdef CANNOT_CHANGE_MODE_CLASS
664      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
665	  && GET_MODE_INNER (imode) == omode)
666	;
667      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
668	return false;
669#endif
670
671      return subreg_offset_representable_p (regno, imode, offset, omode);
672    }
673
674  /* For pseudo registers, we want most of the same checks.  Namely:
675     If the register no larger than a word, the subreg must be lowpart.
676     If the register is larger than a word, the subreg must be the lowpart
677     of a subword.  A subreg does *not* perform arbitrary bit extraction.
678     Given that we've already checked mode/offset alignment, we only have
679     to check subword subregs here.  */
680  if (osize < UNITS_PER_WORD)
681    {
682      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
683      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
684      if (offset % UNITS_PER_WORD != low_off)
685	return false;
686    }
687  return true;
688}
689
690rtx
691gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
692{
693  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
694  return gen_rtx_raw_SUBREG (mode, reg, offset);
695}
696
697/* Generate a SUBREG representing the least-significant part of REG if MODE
698   is smaller than mode of REG, otherwise paradoxical SUBREG.  */
699
700rtx
701gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
702{
703  enum machine_mode inmode;
704
705  inmode = GET_MODE (reg);
706  if (inmode == VOIDmode)
707    inmode = mode;
708  return gen_rtx_SUBREG (mode, reg,
709			 subreg_lowpart_offset (mode, inmode));
710}
711
712/* gen_rtvec (n, [rt1, ..., rtn])
713**
714**	    This routine creates an rtvec and stores within it the
715**	pointers to rtx's which are its arguments.
716*/
717
718/*VARARGS1*/
719rtvec
720gen_rtvec (int n, ...)
721{
722  int i, save_n;
723  rtx *vector;
724  va_list p;
725
726  va_start (p, n);
727
728  if (n == 0)
729    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/
730
731  vector = alloca (n * sizeof (rtx));
732
733  for (i = 0; i < n; i++)
734    vector[i] = va_arg (p, rtx);
735
736  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
737  save_n = n;
738  va_end (p);
739
740  return gen_rtvec_v (save_n, vector);
741}
742
743rtvec
744gen_rtvec_v (int n, rtx *argp)
745{
746  int i;
747  rtvec rt_val;
748
749  if (n == 0)
750    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/
751
752  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/
753
754  for (i = 0; i < n; i++)
755    rt_val->elem[i] = *argp++;
756
757  return rt_val;
758}
759
760/* Generate a REG rtx for a new pseudo register of mode MODE.
761   This pseudo is assigned the next sequential register number.  */
762
763rtx
764gen_reg_rtx (enum machine_mode mode)
765{
766  struct function *f = cfun;
767  rtx val;
768
769  /* Don't let anything called after initial flow analysis create new
770     registers.  */
771  gcc_assert (!no_new_pseudos);
772
773  if (generating_concat_p
774      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
775	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
776    {
777      /* For complex modes, don't make a single pseudo.
778	 Instead, make a CONCAT of two pseudos.
779	 This allows noncontiguous allocation of the real and imaginary parts,
780	 which makes much better code.  Besides, allocating DCmode
781	 pseudos overstrains reload on some machines like the 386.  */
782      rtx realpart, imagpart;
783      enum machine_mode partmode = GET_MODE_INNER (mode);
784
785      realpart = gen_reg_rtx (partmode);
786      imagpart = gen_reg_rtx (partmode);
787      return gen_rtx_CONCAT (mode, realpart, imagpart);
788    }
789
790  /* Make sure regno_pointer_align, and regno_reg_rtx are large
791     enough to have an element for this pseudo reg number.  */
792
793  if (reg_rtx_no == f->emit->regno_pointer_align_length)
794    {
795      int old_size = f->emit->regno_pointer_align_length;
796      char *new;
797      rtx *new1;
798
799      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
800      memset (new + old_size, 0, old_size);
801      f->emit->regno_pointer_align = (unsigned char *) new;
802
803      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
804			  old_size * 2 * sizeof (rtx));
805      memset (new1 + old_size, 0, old_size * sizeof (rtx));
806      regno_reg_rtx = new1;
807
808      f->emit->regno_pointer_align_length = old_size * 2;
809    }
810
811  val = gen_raw_REG (mode, reg_rtx_no);
812  regno_reg_rtx[reg_rtx_no++] = val;
813  return val;
814}
815
816/* Generate a register with same attributes as REG, but offsetted by OFFSET.
817   Do the big endian correction if needed.  */
818
819rtx
820gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
821{
822  rtx new = gen_rtx_REG (mode, regno);
823  tree decl;
824  HOST_WIDE_INT var_size;
825
826  /* PR middle-end/14084
827     The problem appears when a variable is stored in a larger register
828     and later it is used in the original mode or some mode in between
829     or some part of variable is accessed.
830
831     On little endian machines there is no problem because
832     the REG_OFFSET of the start of the variable is the same when
833     accessed in any mode (it is 0).
834
835     However, this is not true on big endian machines.
836     The offset of the start of the variable is different when accessed
837     in different modes.
838     When we are taking a part of the REG we have to change the OFFSET
839     from offset WRT size of mode of REG to offset WRT size of variable.
840
841     If we would not do the big endian correction the resulting REG_OFFSET
842     would be larger than the size of the DECL.
843
844     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
845
846     REG.mode  MODE  DECL size  old offset  new offset  description
847     DI        SI    4          4           0           int32 in SImode
848     DI        SI    1          4           0           char in SImode
849     DI        QI    1          7           0           char in QImode
850     DI        QI    4          5           1           1st element in QImode
851                                                        of char[4]
852     DI        HI    4          6           2           1st element in HImode
853                                                        of int16[2]
854
855     If the size of DECL is equal or greater than the size of REG
856     we can't do this correction because the register holds the
857     whole variable or a part of the variable and thus the REG_OFFSET
858     is already correct.  */
859
860  decl = REG_EXPR (reg);
861  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
862      && decl != NULL
863      && offset > 0
864      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
865      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
866	  && var_size < GET_MODE_SIZE (GET_MODE (reg))))
867    {
868      int offset_le;
869
870      /* Convert machine endian to little endian WRT size of mode of REG.  */
871      if (WORDS_BIG_ENDIAN)
872	offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
873		     / UNITS_PER_WORD) * UNITS_PER_WORD;
874      else
875	offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
876
877      if (BYTES_BIG_ENDIAN)
878	offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
879		      % UNITS_PER_WORD);
880      else
881	offset_le += offset % UNITS_PER_WORD;
882
883      if (offset_le >= var_size)
884	{
885	  /* MODE is wider than the variable so the new reg will cover
886	     the whole variable so the resulting OFFSET should be 0.  */
887	  offset = 0;
888	}
889      else
890	{
891	  /* Convert little endian to machine endian WRT size of variable.  */
892	  if (WORDS_BIG_ENDIAN)
893	    offset = ((var_size - 1 - offset_le)
894		      / UNITS_PER_WORD) * UNITS_PER_WORD;
895	  else
896	    offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
897
898	  if (BYTES_BIG_ENDIAN)
899	    offset += ((var_size - 1 - offset_le)
900		       % UNITS_PER_WORD);
901	  else
902	    offset += offset_le % UNITS_PER_WORD;
903	}
904    }
905
906  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
907				   REG_OFFSET (reg) + offset);
908  return new;
909}
910
911/* Set the decl for MEM to DECL.  */
912
913void
914set_reg_attrs_from_mem (rtx reg, rtx mem)
915{
916  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
917    REG_ATTRS (reg)
918      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
919}
920
921/* Set the register attributes for registers contained in PARM_RTX.
922   Use needed values from memory attributes of MEM.  */
923
924void
925set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
926{
927  if (REG_P (parm_rtx))
928    set_reg_attrs_from_mem (parm_rtx, mem);
929  else if (GET_CODE (parm_rtx) == PARALLEL)
930    {
931      /* Check for a NULL entry in the first slot, used to indicate that the
932	 parameter goes both on the stack and in registers.  */
933      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
934      for (; i < XVECLEN (parm_rtx, 0); i++)
935	{
936	  rtx x = XVECEXP (parm_rtx, 0, i);
937	  if (REG_P (XEXP (x, 0)))
938	    REG_ATTRS (XEXP (x, 0))
939	      = get_reg_attrs (MEM_EXPR (mem),
940			       INTVAL (XEXP (x, 1)));
941	}
942    }
943}
944
945/* Assign the RTX X to declaration T.  */
946void
947set_decl_rtl (tree t, rtx x)
948{
949  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
950
951  if (!x)
952    return;
953  /* For register, we maintain the reverse information too.  */
954  if (REG_P (x))
955    REG_ATTRS (x) = get_reg_attrs (t, 0);
956  else if (GET_CODE (x) == SUBREG)
957    REG_ATTRS (SUBREG_REG (x))
958      = get_reg_attrs (t, -SUBREG_BYTE (x));
959  if (GET_CODE (x) == CONCAT)
960    {
961      if (REG_P (XEXP (x, 0)))
962        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
963      if (REG_P (XEXP (x, 1)))
964	REG_ATTRS (XEXP (x, 1))
965	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
966    }
967  if (GET_CODE (x) == PARALLEL)
968    {
969      int i;
970      for (i = 0; i < XVECLEN (x, 0); i++)
971	{
972	  rtx y = XVECEXP (x, 0, i);
973	  if (REG_P (XEXP (y, 0)))
974	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
975	}
976    }
977}
978
979/* Assign the RTX X to parameter declaration T.  */
980void
981set_decl_incoming_rtl (tree t, rtx x)
982{
983  DECL_INCOMING_RTL (t) = x;
984
985  if (!x)
986    return;
987  /* For register, we maintain the reverse information too.  */
988  if (REG_P (x))
989    REG_ATTRS (x) = get_reg_attrs (t, 0);
990  else if (GET_CODE (x) == SUBREG)
991    REG_ATTRS (SUBREG_REG (x))
992      = get_reg_attrs (t, -SUBREG_BYTE (x));
993  if (GET_CODE (x) == CONCAT)
994    {
995      if (REG_P (XEXP (x, 0)))
996        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
997      if (REG_P (XEXP (x, 1)))
998	REG_ATTRS (XEXP (x, 1))
999	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1000    }
1001  if (GET_CODE (x) == PARALLEL)
1002    {
1003      int i, start;
1004
1005      /* Check for a NULL entry, used to indicate that the parameter goes
1006	 both on the stack and in registers.  */
1007      if (XEXP (XVECEXP (x, 0, 0), 0))
1008	start = 0;
1009      else
1010	start = 1;
1011
1012      for (i = start; i < XVECLEN (x, 0); i++)
1013	{
1014	  rtx y = XVECEXP (x, 0, i);
1015	  if (REG_P (XEXP (y, 0)))
1016	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1017	}
1018    }
1019}
1020
1021/* Identify REG (which may be a CONCAT) as a user register.  */
1022
1023void
1024mark_user_reg (rtx reg)
1025{
1026  if (GET_CODE (reg) == CONCAT)
1027    {
1028      REG_USERVAR_P (XEXP (reg, 0)) = 1;
1029      REG_USERVAR_P (XEXP (reg, 1)) = 1;
1030    }
1031  else
1032    {
1033      gcc_assert (REG_P (reg));
1034      REG_USERVAR_P (reg) = 1;
1035    }
1036}
1037
1038/* Identify REG as a probable pointer register and show its alignment
1039   as ALIGN, if nonzero.  */
1040
1041void
1042mark_reg_pointer (rtx reg, int align)
1043{
1044  if (! REG_POINTER (reg))
1045    {
1046      REG_POINTER (reg) = 1;
1047
1048      if (align)
1049	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1050    }
1051  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1052    /* We can no-longer be sure just how aligned this pointer is.  */
1053    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1054}
1055
1056/* Return 1 plus largest pseudo reg number used in the current function.  */
1057
1058int
1059max_reg_num (void)
1060{
1061  return reg_rtx_no;
1062}
1063
1064/* Return 1 + the largest label number used so far in the current function.  */
1065
1066int
1067max_label_num (void)
1068{
1069  return label_num;
1070}
1071
1072/* Return first label number used in this function (if any were used).  */
1073
1074int
1075get_first_label_num (void)
1076{
1077  return first_label_num;
1078}
1079
1080/* If the rtx for label was created during the expansion of a nested
1081   function, then first_label_num won't include this label number.
1082   Fix this now so that array indicies work later.  */
1083
1084void
1085maybe_set_first_label_num (rtx x)
1086{
1087  if (CODE_LABEL_NUMBER (x) < first_label_num)
1088    first_label_num = CODE_LABEL_NUMBER (x);
1089}
1090
1091/* Return a value representing some low-order bits of X, where the number
1092   of low-order bits is given by MODE.  Note that no conversion is done
1093   between floating-point and fixed-point values, rather, the bit
1094   representation is returned.
1095
1096   This function handles the cases in common between gen_lowpart, below,
1097   and two variants in cse.c and combine.c.  These are the cases that can
1098   be safely handled at all points in the compilation.
1099
1100   If this is not a case we can handle, return 0.  */
1101
1102rtx
1103gen_lowpart_common (enum machine_mode mode, rtx x)
1104{
1105  int msize = GET_MODE_SIZE (mode);
1106  int xsize;
1107  int offset = 0;
1108  enum machine_mode innermode;
1109
1110  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1111     so we have to make one up.  Yuk.  */
1112  innermode = GET_MODE (x);
1113  if (GET_CODE (x) == CONST_INT
1114      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1115    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1116  else if (innermode == VOIDmode)
1117    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1118
1119  xsize = GET_MODE_SIZE (innermode);
1120
1121  gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1122
1123  if (innermode == mode)
1124    return x;
1125
1126  /* MODE must occupy no more words than the mode of X.  */
1127  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1128      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1129    return 0;
1130
1131  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
1132  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1133    return 0;
1134
1135  offset = subreg_lowpart_offset (mode, innermode);
1136
1137  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1138      && (GET_MODE_CLASS (mode) == MODE_INT
1139	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1140    {
1141      /* If we are getting the low-order part of something that has been
1142	 sign- or zero-extended, we can either just use the object being
1143	 extended or make a narrower extension.  If we want an even smaller
1144	 piece than the size of the object being extended, call ourselves
1145	 recursively.
1146
1147	 This case is used mostly by combine and cse.  */
1148
1149      if (GET_MODE (XEXP (x, 0)) == mode)
1150	return XEXP (x, 0);
1151      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1152	return gen_lowpart_common (mode, XEXP (x, 0));
1153      else if (msize < xsize)
1154	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1155    }
1156  else if (GET_CODE (x) == SUBREG || REG_P (x)
1157	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1158	   || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1159    return simplify_gen_subreg (mode, x, innermode, offset);
1160
1161  /* Otherwise, we can't do this.  */
1162  return 0;
1163}
1164
1165rtx
1166gen_highpart (enum machine_mode mode, rtx x)
1167{
1168  unsigned int msize = GET_MODE_SIZE (mode);
1169  rtx result;
1170
1171  /* This case loses if X is a subreg.  To catch bugs early,
1172     complain if an invalid MODE is used even in other cases.  */
1173  gcc_assert (msize <= UNITS_PER_WORD
1174	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1175
1176  result = simplify_gen_subreg (mode, x, GET_MODE (x),
1177				subreg_highpart_offset (mode, GET_MODE (x)));
1178  gcc_assert (result);
1179
1180  /* simplify_gen_subreg is not guaranteed to return a valid operand for
1181     the target if we have a MEM.  gen_highpart must return a valid operand,
1182     emitting code if necessary to do so.  */
1183  if (MEM_P (result))
1184    {
1185      result = validize_mem (result);
1186      gcc_assert (result);
1187    }
1188
1189  return result;
1190}
1191
1192/* Like gen_highpart, but accept mode of EXP operand in case EXP can
1193   be VOIDmode constant.  */
1194rtx
1195gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1196{
1197  if (GET_MODE (exp) != VOIDmode)
1198    {
1199      gcc_assert (GET_MODE (exp) == innermode);
1200      return gen_highpart (outermode, exp);
1201    }
1202  return simplify_gen_subreg (outermode, exp, innermode,
1203			      subreg_highpart_offset (outermode, innermode));
1204}
1205
1206/* Return offset in bytes to get OUTERMODE low part
1207   of the value in mode INNERMODE stored in memory in target format.  */
1208
1209unsigned int
1210subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1211{
1212  unsigned int offset = 0;
1213  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1214
1215  if (difference > 0)
1216    {
1217      if (WORDS_BIG_ENDIAN)
1218	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1219      if (BYTES_BIG_ENDIAN)
1220	offset += difference % UNITS_PER_WORD;
1221    }
1222
1223  return offset;
1224}
1225
1226/* Return offset in bytes to get OUTERMODE high part
1227   of the value in mode INNERMODE stored in memory in target format.  */
1228unsigned int
1229subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1230{
1231  unsigned int offset = 0;
1232  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1233
1234  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1235
1236  if (difference > 0)
1237    {
1238      if (! WORDS_BIG_ENDIAN)
1239	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1240      if (! BYTES_BIG_ENDIAN)
1241	offset += difference % UNITS_PER_WORD;
1242    }
1243
1244  return offset;
1245}
1246
1247/* Return 1 iff X, assumed to be a SUBREG,
1248   refers to the least significant part of its containing reg.
1249   If X is not a SUBREG, always return 1 (it is its own low part!).  */
1250
1251int
1252subreg_lowpart_p (rtx x)
1253{
1254  if (GET_CODE (x) != SUBREG)
1255    return 1;
1256  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1257    return 0;
1258
1259  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1260	  == SUBREG_BYTE (x));
1261}
1262
1263/* Return subword OFFSET of operand OP.
1264   The word number, OFFSET, is interpreted as the word number starting
1265   at the low-order address.  OFFSET 0 is the low-order word if not
1266   WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1267
1268   If we cannot extract the required word, we return zero.  Otherwise,
1269   an rtx corresponding to the requested word will be returned.
1270
1271   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1272   reload has completed, a valid address will always be returned.  After
1273   reload, if a valid address cannot be returned, we return zero.
1274
1275   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1276   it is the responsibility of the caller.
1277
1278   MODE is the mode of OP in case it is a CONST_INT.
1279
1280   ??? This is still rather broken for some cases.  The problem for the
1281   moment is that all callers of this thing provide no 'goal mode' to
1282   tell us to work with.  This exists because all callers were written
1283   in a word based SUBREG world.
1284   Now use of this function can be deprecated by simplify_subreg in most
1285   cases.
1286 */
1287
1288rtx
1289operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1290{
1291  if (mode == VOIDmode)
1292    mode = GET_MODE (op);
1293
1294  gcc_assert (mode != VOIDmode);
1295
1296  /* If OP is narrower than a word, fail.  */
1297  if (mode != BLKmode
1298      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1299    return 0;
1300
1301  /* If we want a word outside OP, return zero.  */
1302  if (mode != BLKmode
1303      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1304    return const0_rtx;
1305
1306  /* Form a new MEM at the requested address.  */
1307  if (MEM_P (op))
1308    {
1309      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1310
1311      if (! validate_address)
1312	return new;
1313
1314      else if (reload_completed)
1315	{
1316	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1317	    return 0;
1318	}
1319      else
1320	return replace_equiv_address (new, XEXP (new, 0));
1321    }
1322
1323  /* Rest can be handled by simplify_subreg.  */
1324  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1325}
1326
1327/* Similar to `operand_subword', but never return 0.  If we can't
1328   extract the required subword, put OP into a register and try again.
1329   The second attempt must succeed.  We always validate the address in
1330   this case.
1331
1332   MODE is the mode of OP, in case it is CONST_INT.  */
1333
1334rtx
1335operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1336{
1337  rtx result = operand_subword (op, offset, 1, mode);
1338
1339  if (result)
1340    return result;
1341
1342  if (mode != BLKmode && mode != VOIDmode)
1343    {
1344      /* If this is a register which can not be accessed by words, copy it
1345	 to a pseudo register.  */
1346      if (REG_P (op))
1347	op = copy_to_reg (op);
1348      else
1349	op = force_reg (mode, op);
1350    }
1351
1352  result = operand_subword (op, offset, 1, mode);
1353  gcc_assert (result);
1354
1355  return result;
1356}
1357
1358/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1359   or (2) a component ref of something variable.  Represent the later with
1360   a NULL expression.  */
1361
1362static tree
1363component_ref_for_mem_expr (tree ref)
1364{
1365  tree inner = TREE_OPERAND (ref, 0);
1366
1367  if (TREE_CODE (inner) == COMPONENT_REF)
1368    inner = component_ref_for_mem_expr (inner);
1369  else
1370    {
1371      /* Now remove any conversions: they don't change what the underlying
1372	 object is.  Likewise for SAVE_EXPR.  */
1373      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1374	     || TREE_CODE (inner) == NON_LVALUE_EXPR
1375	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1376	     || TREE_CODE (inner) == SAVE_EXPR)
1377	inner = TREE_OPERAND (inner, 0);
1378
1379      if (! DECL_P (inner))
1380	inner = NULL_TREE;
1381    }
1382
1383  if (inner == TREE_OPERAND (ref, 0))
1384    return ref;
1385  else
1386    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1387		   TREE_OPERAND (ref, 1), NULL_TREE);
1388}
1389
1390/* Returns 1 if both MEM_EXPR can be considered equal
1391   and 0 otherwise.  */
1392
1393int
1394mem_expr_equal_p (tree expr1, tree expr2)
1395{
1396  if (expr1 == expr2)
1397    return 1;
1398
1399  if (! expr1 || ! expr2)
1400    return 0;
1401
1402  if (TREE_CODE (expr1) != TREE_CODE (expr2))
1403    return 0;
1404
1405  if (TREE_CODE (expr1) == COMPONENT_REF)
1406    return
1407      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1408			TREE_OPERAND (expr2, 0))
1409      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1410			   TREE_OPERAND (expr2, 1));
1411
1412  if (INDIRECT_REF_P (expr1))
1413    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1414			     TREE_OPERAND (expr2, 0));
1415
1416  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1417	      have been resolved here.  */
1418  gcc_assert (DECL_P (expr1));
1419
1420  /* Decls with different pointers can't be equal.  */
1421  return 0;
1422}
1423
1424/* Given REF, a MEM, and T, either the type of X or the expression
1425   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1426   if we are making a new object of this type.  BITPOS is nonzero if
1427   there is an offset outstanding on T that will be applied later.  */
1428
1429void
1430set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1431				 HOST_WIDE_INT bitpos)
1432{
1433  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1434  tree expr = MEM_EXPR (ref);
1435  rtx offset = MEM_OFFSET (ref);
1436  rtx size = MEM_SIZE (ref);
1437  unsigned int align = MEM_ALIGN (ref);
1438  HOST_WIDE_INT apply_bitpos = 0;
1439  tree type;
1440
1441  /* It can happen that type_for_mode was given a mode for which there
1442     is no language-level type.  In which case it returns NULL, which
1443     we can see here.  */
1444  if (t == NULL_TREE)
1445    return;
1446
1447  type = TYPE_P (t) ? t : TREE_TYPE (t);
1448  if (type == error_mark_node)
1449    return;
1450
1451  /* If we have already set DECL_RTL = ref, get_alias_set will get the
1452     wrong answer, as it assumes that DECL_RTL already has the right alias
1453     info.  Callers should not set DECL_RTL until after the call to
1454     set_mem_attributes.  */
1455  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1456
1457  /* Get the alias set from the expression or type (perhaps using a
1458     front-end routine) and use it.  */
1459  alias = get_alias_set (t);
1460
1461  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1462  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1463  MEM_POINTER (ref) = POINTER_TYPE_P (type);
1464
1465  /* If we are making an object of this type, or if this is a DECL, we know
1466     that it is a scalar if the type is not an aggregate.  */
1467  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1468    MEM_SCALAR_P (ref) = 1;
1469
1470  /* We can set the alignment from the type if we are making an object,
1471     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
1472  if (objectp || TREE_CODE (t) == INDIRECT_REF
1473      || TREE_CODE (t) == ALIGN_INDIRECT_REF
1474      || TYPE_ALIGN_OK (type))
1475    align = MAX (align, TYPE_ALIGN (type));
1476  else
1477    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1478      {
1479	if (integer_zerop (TREE_OPERAND (t, 1)))
1480	  /* We don't know anything about the alignment.  */
1481	  align = BITS_PER_UNIT;
1482	else
1483	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1484      }
1485
1486  /* If the size is known, we can set that.  */
1487  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1488    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1489
1490  /* If T is not a type, we may be able to deduce some more information about
1491     the expression.  */
1492  if (! TYPE_P (t))
1493    {
1494      tree base;
1495
1496      if (TREE_THIS_VOLATILE (t))
1497	MEM_VOLATILE_P (ref) = 1;
1498
1499      /* Now remove any conversions: they don't change what the underlying
1500	 object is.  Likewise for SAVE_EXPR.  */
1501      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1502	     || TREE_CODE (t) == NON_LVALUE_EXPR
1503	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
1504	     || TREE_CODE (t) == SAVE_EXPR)
1505	t = TREE_OPERAND (t, 0);
1506
1507      /* We may look through structure-like accesses for the purposes of
1508	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
1509      base = t;
1510      while (TREE_CODE (base) == COMPONENT_REF
1511	     || TREE_CODE (base) == REALPART_EXPR
1512	     || TREE_CODE (base) == IMAGPART_EXPR
1513	     || TREE_CODE (base) == BIT_FIELD_REF)
1514	base = TREE_OPERAND (base, 0);
1515
1516      if (DECL_P (base))
1517	{
1518	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1519	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1520	  else
1521	    MEM_NOTRAP_P (ref) = 1;
1522	}
1523      else
1524	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1525
1526      base = get_base_address (base);
1527      if (base && DECL_P (base)
1528	  && TREE_READONLY (base)
1529	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1530	{
1531	  tree base_type = TREE_TYPE (base);
1532	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1533		      || DECL_ARTIFICIAL (base));
1534	  MEM_READONLY_P (ref) = 1;
1535	}
1536
1537      /* If this expression uses it's parent's alias set, mark it such
1538	 that we won't change it.  */
1539      if (component_uses_parent_alias_set (t))
1540	MEM_KEEP_ALIAS_SET_P (ref) = 1;
1541
1542      /* If this is a decl, set the attributes of the MEM from it.  */
1543      if (DECL_P (t))
1544	{
1545	  expr = t;
1546	  offset = const0_rtx;
1547	  apply_bitpos = bitpos;
1548	  size = (DECL_SIZE_UNIT (t)
1549		  && host_integerp (DECL_SIZE_UNIT (t), 1)
1550		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1551	  align = DECL_ALIGN (t);
1552	}
1553
1554      /* If this is a constant, we know the alignment.  */
1555      else if (CONSTANT_CLASS_P (t))
1556	{
1557	  align = TYPE_ALIGN (type);
1558#ifdef CONSTANT_ALIGNMENT
1559	  align = CONSTANT_ALIGNMENT (t, align);
1560#endif
1561	}
1562
1563      /* If this is a field reference and not a bit-field, record it.  */
1564      /* ??? There is some information that can be gleened from bit-fields,
1565	 such as the word offset in the structure that might be modified.
1566	 But skip it for now.  */
1567      else if (TREE_CODE (t) == COMPONENT_REF
1568	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1569	{
1570	  expr = component_ref_for_mem_expr (t);
1571	  offset = const0_rtx;
1572	  apply_bitpos = bitpos;
1573	  /* ??? Any reason the field size would be different than
1574	     the size we got from the type?  */
1575	}
1576
1577      /* If this is an array reference, look for an outer field reference.  */
1578      else if (TREE_CODE (t) == ARRAY_REF)
1579	{
1580	  tree off_tree = size_zero_node;
1581	  /* We can't modify t, because we use it at the end of the
1582	     function.  */
1583	  tree t2 = t;
1584
1585	  do
1586	    {
1587	      tree index = TREE_OPERAND (t2, 1);
1588	      tree low_bound = array_ref_low_bound (t2);
1589	      tree unit_size = array_ref_element_size (t2);
1590
1591	      /* We assume all arrays have sizes that are a multiple of a byte.
1592		 First subtract the lower bound, if any, in the type of the
1593		 index, then convert to sizetype and multiply by the size of
1594		 the array element.  */
1595	      if (! integer_zerop (low_bound))
1596		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1597				     index, low_bound);
1598
1599	      off_tree = size_binop (PLUS_EXPR,
1600				     size_binop (MULT_EXPR,
1601						 fold_convert (sizetype,
1602							       index),
1603						 unit_size),
1604				     off_tree);
1605	      t2 = TREE_OPERAND (t2, 0);
1606	    }
1607	  while (TREE_CODE (t2) == ARRAY_REF);
1608
1609	  if (DECL_P (t2))
1610	    {
1611	      expr = t2;
1612	      offset = NULL;
1613	      if (host_integerp (off_tree, 1))
1614		{
1615		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1616		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1617		  align = DECL_ALIGN (t2);
1618		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1619	            align = aoff;
1620		  offset = GEN_INT (ioff);
1621		  apply_bitpos = bitpos;
1622		}
1623	    }
1624	  else if (TREE_CODE (t2) == COMPONENT_REF)
1625	    {
1626	      expr = component_ref_for_mem_expr (t2);
1627	      if (host_integerp (off_tree, 1))
1628		{
1629		  offset = GEN_INT (tree_low_cst (off_tree, 1));
1630		  apply_bitpos = bitpos;
1631		}
1632	      /* ??? Any reason the field size would be different than
1633		 the size we got from the type?  */
1634	    }
1635	  else if (flag_argument_noalias > 1
1636		   && (INDIRECT_REF_P (t2))
1637		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1638	    {
1639	      expr = t2;
1640	      offset = NULL;
1641	    }
1642	}
1643
1644      /* If this is a Fortran indirect argument reference, record the
1645	 parameter decl.  */
1646      else if (flag_argument_noalias > 1
1647	       && (INDIRECT_REF_P (t))
1648	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1649	{
1650	  expr = t;
1651	  offset = NULL;
1652	}
1653    }
1654
1655  /* If we modified OFFSET based on T, then subtract the outstanding
1656     bit position offset.  Similarly, increase the size of the accessed
1657     object to contain the negative offset.  */
1658  if (apply_bitpos)
1659    {
1660      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1661      if (size)
1662	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1663    }
1664
1665  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1666    {
1667      /* Force EXPR and OFFSE to NULL, since we don't know exactly what
1668	 we're overlapping.  */
1669      offset = NULL;
1670      expr = NULL;
1671    }
1672
1673  /* Now set the attributes we computed above.  */
1674  MEM_ATTRS (ref)
1675    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1676
1677  /* If this is already known to be a scalar or aggregate, we are done.  */
1678  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1679    return;
1680
1681  /* If it is a reference into an aggregate, this is part of an aggregate.
1682     Otherwise we don't know.  */
1683  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1684	   || TREE_CODE (t) == ARRAY_RANGE_REF
1685	   || TREE_CODE (t) == BIT_FIELD_REF)
1686    MEM_IN_STRUCT_P (ref) = 1;
1687}
1688
1689void
1690set_mem_attributes (rtx ref, tree t, int objectp)
1691{
1692  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1693}
1694
1695/* Set the decl for MEM to DECL.  */
1696
1697void
1698set_mem_attrs_from_reg (rtx mem, rtx reg)
1699{
1700  MEM_ATTRS (mem)
1701    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1702		     GEN_INT (REG_OFFSET (reg)),
1703		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1704}
1705
1706/* Set the alias set of MEM to SET.  */
1707
1708void
1709set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1710{
1711#ifdef ENABLE_CHECKING
1712  /* If the new and old alias sets don't conflict, something is wrong.  */
1713  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1714#endif
1715
1716  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1717				   MEM_SIZE (mem), MEM_ALIGN (mem),
1718				   GET_MODE (mem));
1719}
1720
1721/* Set the alignment of MEM to ALIGN bits.  */
1722
1723void
1724set_mem_align (rtx mem, unsigned int align)
1725{
1726  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1727				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
1728				   GET_MODE (mem));
1729}
1730
1731/* Set the expr for MEM to EXPR.  */
1732
1733void
1734set_mem_expr (rtx mem, tree expr)
1735{
1736  MEM_ATTRS (mem)
1737    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1738		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1739}
1740
1741/* Set the offset of MEM to OFFSET.  */
1742
1743void
1744set_mem_offset (rtx mem, rtx offset)
1745{
1746  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1747				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1748				   GET_MODE (mem));
1749}
1750
1751/* Set the size of MEM to SIZE.  */
1752
1753void
1754set_mem_size (rtx mem, rtx size)
1755{
1756  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1757				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1758				   GET_MODE (mem));
1759}
1760
1761/* Return a memory reference like MEMREF, but with its mode changed to MODE
1762   and its address changed to ADDR.  (VOIDmode means don't change the mode.
1763   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1764   returned memory location is required to be valid.  The memory
1765   attributes are not changed.  */
1766
1767static rtx
1768change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1769{
1770  rtx new;
1771
1772  gcc_assert (MEM_P (memref));
1773  if (mode == VOIDmode)
1774    mode = GET_MODE (memref);
1775  if (addr == 0)
1776    addr = XEXP (memref, 0);
1777  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1778      && (!validate || memory_address_p (mode, addr)))
1779    return memref;
1780
1781  if (validate)
1782    {
1783      if (reload_in_progress || reload_completed)
1784	gcc_assert (memory_address_p (mode, addr));
1785      else
1786	addr = memory_address (mode, addr);
1787    }
1788
1789  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1790    return memref;
1791
1792  new = gen_rtx_MEM (mode, addr);
1793  MEM_COPY_ATTRIBUTES (new, memref);
1794  return new;
1795}
1796
1797/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1798   way we are changing MEMREF, so we only preserve the alias set.  */
1799
1800rtx
1801change_address (rtx memref, enum machine_mode mode, rtx addr)
1802{
1803  rtx new = change_address_1 (memref, mode, addr, 1), size;
1804  enum machine_mode mmode = GET_MODE (new);
1805  unsigned int align;
1806
1807  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1808  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1809
1810  /* If there are no changes, just return the original memory reference.  */
1811  if (new == memref)
1812    {
1813      if (MEM_ATTRS (memref) == 0
1814	  || (MEM_EXPR (memref) == NULL
1815	      && MEM_OFFSET (memref) == NULL
1816	      && MEM_SIZE (memref) == size
1817	      && MEM_ALIGN (memref) == align))
1818	return new;
1819
1820      new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1821      MEM_COPY_ATTRIBUTES (new, memref);
1822    }
1823
1824  MEM_ATTRS (new)
1825    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1826
1827  return new;
1828}
1829
1830/* Return a memory reference like MEMREF, but with its mode changed
1831   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
1832   nonzero, the memory address is forced to be valid.
1833   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1834   and caller is responsible for adjusting MEMREF base register.  */
1835
1836rtx
1837adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1838		  int validate, int adjust)
1839{
1840  rtx addr = XEXP (memref, 0);
1841  rtx new;
1842  rtx memoffset = MEM_OFFSET (memref);
1843  rtx size = 0;
1844  unsigned int memalign = MEM_ALIGN (memref);
1845
1846  /* If there are no changes, just return the original memory reference.  */
1847  if (mode == GET_MODE (memref) && !offset
1848      && (!validate || memory_address_p (mode, addr)))
1849    return memref;
1850
1851  /* ??? Prefer to create garbage instead of creating shared rtl.
1852     This may happen even if offset is nonzero -- consider
1853     (plus (plus reg reg) const_int) -- so do this always.  */
1854  addr = copy_rtx (addr);
1855
1856  if (adjust)
1857    {
1858      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1859	 object, we can merge it into the LO_SUM.  */
1860      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1861	  && offset >= 0
1862	  && (unsigned HOST_WIDE_INT) offset
1863	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1864	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1865			       plus_constant (XEXP (addr, 1), offset));
1866      else
1867	addr = plus_constant (addr, offset);
1868    }
1869
1870  new = change_address_1 (memref, mode, addr, validate);
1871
1872  /* Compute the new values of the memory attributes due to this adjustment.
1873     We add the offsets and update the alignment.  */
1874  if (memoffset)
1875    memoffset = GEN_INT (offset + INTVAL (memoffset));
1876
1877  /* Compute the new alignment by taking the MIN of the alignment and the
1878     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1879     if zero.  */
1880  if (offset != 0)
1881    memalign
1882      = MIN (memalign,
1883	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1884
1885  /* We can compute the size in a number of ways.  */
1886  if (GET_MODE (new) != BLKmode)
1887    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1888  else if (MEM_SIZE (memref))
1889    size = plus_constant (MEM_SIZE (memref), -offset);
1890
1891  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1892				   memoffset, size, memalign, GET_MODE (new));
1893
1894  /* At some point, we should validate that this offset is within the object,
1895     if all the appropriate values are known.  */
1896  return new;
1897}
1898
1899/* Return a memory reference like MEMREF, but with its mode changed
1900   to MODE and its address changed to ADDR, which is assumed to be
1901   MEMREF offseted by OFFSET bytes.  If VALIDATE is
1902   nonzero, the memory address is forced to be valid.  */
1903
1904rtx
1905adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1906			     HOST_WIDE_INT offset, int validate)
1907{
1908  memref = change_address_1 (memref, VOIDmode, addr, validate);
1909  return adjust_address_1 (memref, mode, offset, validate, 0);
1910}
1911
1912/* Return a memory reference like MEMREF, but whose address is changed by
1913   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
1914   known to be in OFFSET (possibly 1).  */
1915
1916rtx
1917offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1918{
1919  rtx new, addr = XEXP (memref, 0);
1920
1921  new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1922
1923  /* At this point we don't know _why_ the address is invalid.  It
1924     could have secondary memory references, multiplies or anything.
1925
1926     However, if we did go and rearrange things, we can wind up not
1927     being able to recognize the magic around pic_offset_table_rtx.
1928     This stuff is fragile, and is yet another example of why it is
1929     bad to expose PIC machinery too early.  */
1930  if (! memory_address_p (GET_MODE (memref), new)
1931      && GET_CODE (addr) == PLUS
1932      && XEXP (addr, 0) == pic_offset_table_rtx)
1933    {
1934      addr = force_reg (GET_MODE (addr), addr);
1935      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1936    }
1937
1938  update_temp_slot_address (XEXP (memref, 0), new);
1939  new = change_address_1 (memref, VOIDmode, new, 1);
1940
1941  /* If there are no changes, just return the original memory reference.  */
1942  if (new == memref)
1943    return new;
1944
1945  /* Update the alignment to reflect the offset.  Reset the offset, which
1946     we don't know.  */
1947  MEM_ATTRS (new)
1948    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1949		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1950		     GET_MODE (new));
1951  return new;
1952}
1953
1954/* Return a memory reference like MEMREF, but with its address changed to
1955   ADDR.  The caller is asserting that the actual piece of memory pointed
1956   to is the same, just the form of the address is being changed, such as
1957   by putting something into a register.  */
1958
1959rtx
1960replace_equiv_address (rtx memref, rtx addr)
1961{
1962  /* change_address_1 copies the memory attribute structure without change
1963     and that's exactly what we want here.  */
1964  update_temp_slot_address (XEXP (memref, 0), addr);
1965  return change_address_1 (memref, VOIDmode, addr, 1);
1966}
1967
1968/* Likewise, but the reference is not required to be valid.  */
1969
1970rtx
1971replace_equiv_address_nv (rtx memref, rtx addr)
1972{
1973  return change_address_1 (memref, VOIDmode, addr, 0);
1974}
1975
1976/* Return a memory reference like MEMREF, but with its mode widened to
1977   MODE and offset by OFFSET.  This would be used by targets that e.g.
1978   cannot issue QImode memory operations and have to use SImode memory
1979   operations plus masking logic.  */
1980
1981rtx
1982widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1983{
1984  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1985  tree expr = MEM_EXPR (new);
1986  rtx memoffset = MEM_OFFSET (new);
1987  unsigned int size = GET_MODE_SIZE (mode);
1988
1989  /* If there are no changes, just return the original memory reference.  */
1990  if (new == memref)
1991    return new;
1992
1993  /* If we don't know what offset we were at within the expression, then
1994     we can't know if we've overstepped the bounds.  */
1995  if (! memoffset)
1996    expr = NULL_TREE;
1997
1998  while (expr)
1999    {
2000      if (TREE_CODE (expr) == COMPONENT_REF)
2001	{
2002	  tree field = TREE_OPERAND (expr, 1);
2003	  tree offset = component_ref_field_offset (expr);
2004
2005	  if (! DECL_SIZE_UNIT (field))
2006	    {
2007	      expr = NULL_TREE;
2008	      break;
2009	    }
2010
2011	  /* Is the field at least as large as the access?  If so, ok,
2012	     otherwise strip back to the containing structure.  */
2013	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2014	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2015	      && INTVAL (memoffset) >= 0)
2016	    break;
2017
2018	  if (! host_integerp (offset, 1))
2019	    {
2020	      expr = NULL_TREE;
2021	      break;
2022	    }
2023
2024	  expr = TREE_OPERAND (expr, 0);
2025	  memoffset
2026	    = (GEN_INT (INTVAL (memoffset)
2027			+ tree_low_cst (offset, 1)
2028			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2029			   / BITS_PER_UNIT)));
2030	}
2031      /* Similarly for the decl.  */
2032      else if (DECL_P (expr)
2033	       && DECL_SIZE_UNIT (expr)
2034	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2035	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2036	       && (! memoffset || INTVAL (memoffset) >= 0))
2037	break;
2038      else
2039	{
2040	  /* The widened memory access overflows the expression, which means
2041	     that it could alias another expression.  Zap it.  */
2042	  expr = NULL_TREE;
2043	  break;
2044	}
2045    }
2046
2047  if (! expr)
2048    memoffset = NULL_RTX;
2049
2050  /* The widened memory may alias other stuff, so zap the alias set.  */
2051  /* ??? Maybe use get_alias_set on any remaining expression.  */
2052
2053  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2054				   MEM_ALIGN (new), mode);
2055
2056  return new;
2057}
2058
2059/* Return a newly created CODE_LABEL rtx with a unique label number.  */
2060
2061rtx
2062gen_label_rtx (void)
2063{
2064  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2065/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
2066			     NULL, label_num++, NULL, 0);
2067/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
2068}
2069
2070/* For procedure integration.  */
2071
2072/* Install new pointers to the first and last insns in the chain.
2073   Also, set cur_insn_uid to one higher than the last in use.
2074   Used for an inline-procedure after copying the insn chain.  */
2075
2076void
2077set_new_first_and_last_insn (rtx first, rtx last)
2078{
2079  rtx insn;
2080
2081  first_insn = first;
2082  last_insn = last;
2083  cur_insn_uid = 0;
2084
2085  for (insn = first; insn; insn = NEXT_INSN (insn))
2086    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2087
2088  cur_insn_uid++;
2089}
2090
2091/* Go through all the RTL insn bodies and copy any invalid shared
2092   structure.  This routine should only be called once.  */
2093
2094static void
2095unshare_all_rtl_1 (tree fndecl, rtx insn)
2096{
2097  tree decl;
2098
2099  /* Make sure that virtual parameters are not shared.  */
2100  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2101    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2102
2103  /* Make sure that virtual stack slots are not shared.  */
2104  unshare_all_decls (DECL_INITIAL (fndecl));
2105
2106  /* Unshare just about everything else.  */
2107  unshare_all_rtl_in_chain (insn);
2108
2109  /* Make sure the addresses of stack slots found outside the insn chain
2110     (such as, in DECL_RTL of a variable) are not shared
2111     with the insn chain.
2112
2113     This special care is necessary when the stack slot MEM does not
2114     actually appear in the insn chain.  If it does appear, its address
2115     is unshared from all else at that point.  */
2116  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2117}
2118
2119/* Go through all the RTL insn bodies and copy any invalid shared
2120   structure, again.  This is a fairly expensive thing to do so it
2121   should be done sparingly.  */
2122
2123void
2124unshare_all_rtl_again (rtx insn)
2125{
2126  rtx p;
2127  tree decl;
2128
2129  for (p = insn; p; p = NEXT_INSN (p))
2130    if (INSN_P (p))
2131      {
2132	reset_used_flags (PATTERN (p));
2133	reset_used_flags (REG_NOTES (p));
2134	reset_used_flags (LOG_LINKS (p));
2135      }
2136
2137  /* Make sure that virtual stack slots are not shared.  */
2138  reset_used_decls (DECL_INITIAL (cfun->decl));
2139
2140  /* Make sure that virtual parameters are not shared.  */
2141  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2142    reset_used_flags (DECL_RTL (decl));
2143
2144  reset_used_flags (stack_slot_list);
2145
2146  unshare_all_rtl_1 (cfun->decl, insn);
2147}
2148
2149unsigned int
2150unshare_all_rtl (void)
2151{
2152  unshare_all_rtl_1 (current_function_decl, get_insns ());
2153  return 0;
2154}
2155
2156struct tree_opt_pass pass_unshare_all_rtl =
2157{
2158  "unshare",                            /* name */
2159  NULL,                                 /* gate */
2160  unshare_all_rtl,                      /* execute */
2161  NULL,                                 /* sub */
2162  NULL,                                 /* next */
2163  0,                                    /* static_pass_number */
2164  0,                                    /* tv_id */
2165  0,                                    /* properties_required */
2166  0,                                    /* properties_provided */
2167  0,                                    /* properties_destroyed */
2168  0,                                    /* todo_flags_start */
2169  TODO_dump_func,                       /* todo_flags_finish */
2170  0                                     /* letter */
2171};
2172
2173
2174/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2175   Recursively does the same for subexpressions.  */
2176
2177static void
2178verify_rtx_sharing (rtx orig, rtx insn)
2179{
2180  rtx x = orig;
2181  int i;
2182  enum rtx_code code;
2183  const char *format_ptr;
2184
2185  if (x == 0)
2186    return;
2187
2188  code = GET_CODE (x);
2189
2190  /* These types may be freely shared.  */
2191
2192  switch (code)
2193    {
2194    case REG:
2195    case CONST_INT:
2196    case CONST_DOUBLE:
2197    case CONST_VECTOR:
2198    case SYMBOL_REF:
2199    case LABEL_REF:
2200    case CODE_LABEL:
2201    case PC:
2202    case CC0:
2203    case SCRATCH:
2204      return;
2205      /* SCRATCH must be shared because they represent distinct values.  */
2206    case CLOBBER:
2207      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2208	return;
2209      break;
2210
2211    case CONST:
2212      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2213	 a LABEL_REF, it isn't sharable.  */
2214      if (GET_CODE (XEXP (x, 0)) == PLUS
2215	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2216	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2217	return;
2218      break;
2219
2220    case MEM:
2221      /* A MEM is allowed to be shared if its address is constant.  */
2222      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2223	  || reload_completed || reload_in_progress)
2224	return;
2225
2226      break;
2227
2228    default:
2229      break;
2230    }
2231
2232  /* This rtx may not be shared.  If it has already been seen,
2233     replace it with a copy of itself.  */
2234#ifdef ENABLE_CHECKING
2235  if (RTX_FLAG (x, used))
2236    {
2237      error ("invalid rtl sharing found in the insn");
2238      debug_rtx (insn);
2239      error ("shared rtx");
2240      debug_rtx (x);
2241      internal_error ("internal consistency failure");
2242    }
2243#endif
2244  gcc_assert (!RTX_FLAG (x, used));
2245
2246  RTX_FLAG (x, used) = 1;
2247
2248  /* Now scan the subexpressions recursively.  */
2249
2250  format_ptr = GET_RTX_FORMAT (code);
2251
2252  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2253    {
2254      switch (*format_ptr++)
2255	{
2256	case 'e':
2257	  verify_rtx_sharing (XEXP (x, i), insn);
2258	  break;
2259
2260	case 'E':
2261	  if (XVEC (x, i) != NULL)
2262	    {
2263	      int j;
2264	      int len = XVECLEN (x, i);
2265
2266	      for (j = 0; j < len; j++)
2267		{
2268		  /* We allow sharing of ASM_OPERANDS inside single
2269		     instruction.  */
2270		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2271		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2272			  == ASM_OPERANDS))
2273		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2274		  else
2275		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2276		}
2277	    }
2278	  break;
2279	}
2280    }
2281  return;
2282}
2283
2284/* Go through all the RTL insn bodies and check that there is no unexpected
2285   sharing in between the subexpressions.  */
2286
2287void
2288verify_rtl_sharing (void)
2289{
2290  rtx p;
2291
2292  for (p = get_insns (); p; p = NEXT_INSN (p))
2293    if (INSN_P (p))
2294      {
2295	reset_used_flags (PATTERN (p));
2296	reset_used_flags (REG_NOTES (p));
2297	reset_used_flags (LOG_LINKS (p));
2298      }
2299
2300  for (p = get_insns (); p; p = NEXT_INSN (p))
2301    if (INSN_P (p))
2302      {
2303	verify_rtx_sharing (PATTERN (p), p);
2304	verify_rtx_sharing (REG_NOTES (p), p);
2305	verify_rtx_sharing (LOG_LINKS (p), p);
2306      }
2307}
2308
2309/* Go through all the RTL insn bodies and copy any invalid shared structure.
2310   Assumes the mark bits are cleared at entry.  */
2311
2312void
2313unshare_all_rtl_in_chain (rtx insn)
2314{
2315  for (; insn; insn = NEXT_INSN (insn))
2316    if (INSN_P (insn))
2317      {
2318	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2319	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2320	LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2321      }
2322}
2323
2324/* Go through all virtual stack slots of a function and copy any
2325   shared structure.  */
2326static void
2327unshare_all_decls (tree blk)
2328{
2329  tree t;
2330
2331  /* Copy shared decls.  */
2332  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2333    if (DECL_RTL_SET_P (t))
2334      SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2335
2336  /* Now process sub-blocks.  */
2337  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2338    unshare_all_decls (t);
2339}
2340
2341/* Go through all virtual stack slots of a function and mark them as
2342   not shared.  */
2343static void
2344reset_used_decls (tree blk)
2345{
2346  tree t;
2347
2348  /* Mark decls.  */
2349  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2350    if (DECL_RTL_SET_P (t))
2351      reset_used_flags (DECL_RTL (t));
2352
2353  /* Now process sub-blocks.  */
2354  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2355    reset_used_decls (t);
2356}
2357
2358/* Mark ORIG as in use, and return a copy of it if it was already in use.
2359   Recursively does the same for subexpressions.  Uses
2360   copy_rtx_if_shared_1 to reduce stack space.  */
2361
2362rtx
2363copy_rtx_if_shared (rtx orig)
2364{
2365  copy_rtx_if_shared_1 (&orig);
2366  return orig;
2367}
2368
2369/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2370   use.  Recursively does the same for subexpressions.  */
2371
2372static void
2373copy_rtx_if_shared_1 (rtx *orig1)
2374{
2375  rtx x;
2376  int i;
2377  enum rtx_code code;
2378  rtx *last_ptr;
2379  const char *format_ptr;
2380  int copied = 0;
2381  int length;
2382
2383  /* Repeat is used to turn tail-recursion into iteration.  */
2384repeat:
2385  x = *orig1;
2386
2387  if (x == 0)
2388    return;
2389
2390  code = GET_CODE (x);
2391
2392  /* These types may be freely shared.  */
2393
2394  switch (code)
2395    {
2396    case REG:
2397    case CONST_INT:
2398    case CONST_DOUBLE:
2399    case CONST_VECTOR:
2400    case SYMBOL_REF:
2401    case LABEL_REF:
2402    case CODE_LABEL:
2403    case PC:
2404    case CC0:
2405    case SCRATCH:
2406      /* SCRATCH must be shared because they represent distinct values.  */
2407      return;
2408    case CLOBBER:
2409      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2410	return;
2411      break;
2412
2413    case CONST:
2414      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2415	 a LABEL_REF, it isn't sharable.  */
2416      if (GET_CODE (XEXP (x, 0)) == PLUS
2417	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2418	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2419	return;
2420      break;
2421
2422    case INSN:
2423    case JUMP_INSN:
2424    case CALL_INSN:
2425    case NOTE:
2426    case BARRIER:
2427      /* The chain of insns is not being copied.  */
2428      return;
2429
2430    default:
2431      break;
2432    }
2433
2434  /* This rtx may not be shared.  If it has already been seen,
2435     replace it with a copy of itself.  */
2436
2437  if (RTX_FLAG (x, used))
2438    {
2439      x = shallow_copy_rtx (x);
2440      copied = 1;
2441    }
2442  RTX_FLAG (x, used) = 1;
2443
2444  /* Now scan the subexpressions recursively.
2445     We can store any replaced subexpressions directly into X
2446     since we know X is not shared!  Any vectors in X
2447     must be copied if X was copied.  */
2448
2449  format_ptr = GET_RTX_FORMAT (code);
2450  length = GET_RTX_LENGTH (code);
2451  last_ptr = NULL;
2452
2453  for (i = 0; i < length; i++)
2454    {
2455      switch (*format_ptr++)
2456	{
2457	case 'e':
2458          if (last_ptr)
2459            copy_rtx_if_shared_1 (last_ptr);
2460	  last_ptr = &XEXP (x, i);
2461	  break;
2462
2463	case 'E':
2464	  if (XVEC (x, i) != NULL)
2465	    {
2466	      int j;
2467	      int len = XVECLEN (x, i);
2468
2469              /* Copy the vector iff I copied the rtx and the length
2470		 is nonzero.  */
2471	      if (copied && len > 0)
2472		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2473
2474              /* Call recursively on all inside the vector.  */
2475	      for (j = 0; j < len; j++)
2476                {
2477		  if (last_ptr)
2478		    copy_rtx_if_shared_1 (last_ptr);
2479                  last_ptr = &XVECEXP (x, i, j);
2480                }
2481	    }
2482	  break;
2483	}
2484    }
2485  *orig1 = x;
2486  if (last_ptr)
2487    {
2488      orig1 = last_ptr;
2489      goto repeat;
2490    }
2491  return;
2492}
2493
2494/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2495   to look for shared sub-parts.  */
2496
2497void
2498reset_used_flags (rtx x)
2499{
2500  int i, j;
2501  enum rtx_code code;
2502  const char *format_ptr;
2503  int length;
2504
2505  /* Repeat is used to turn tail-recursion into iteration.  */
2506repeat:
2507  if (x == 0)
2508    return;
2509
2510  code = GET_CODE (x);
2511
2512  /* These types may be freely shared so we needn't do any resetting
2513     for them.  */
2514
2515  switch (code)
2516    {
2517    case REG:
2518    case CONST_INT:
2519    case CONST_DOUBLE:
2520    case CONST_VECTOR:
2521    case SYMBOL_REF:
2522    case CODE_LABEL:
2523    case PC:
2524    case CC0:
2525      return;
2526
2527    case INSN:
2528    case JUMP_INSN:
2529    case CALL_INSN:
2530    case NOTE:
2531    case LABEL_REF:
2532    case BARRIER:
2533      /* The chain of insns is not being copied.  */
2534      return;
2535
2536    default:
2537      break;
2538    }
2539
2540  RTX_FLAG (x, used) = 0;
2541
2542  format_ptr = GET_RTX_FORMAT (code);
2543  length = GET_RTX_LENGTH (code);
2544
2545  for (i = 0; i < length; i++)
2546    {
2547      switch (*format_ptr++)
2548	{
2549	case 'e':
2550          if (i == length-1)
2551            {
2552              x = XEXP (x, i);
2553	      goto repeat;
2554            }
2555	  reset_used_flags (XEXP (x, i));
2556	  break;
2557
2558	case 'E':
2559	  for (j = 0; j < XVECLEN (x, i); j++)
2560	    reset_used_flags (XVECEXP (x, i, j));
2561	  break;
2562	}
2563    }
2564}
2565
2566/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2567   to look for shared sub-parts.  */
2568
2569void
2570set_used_flags (rtx x)
2571{
2572  int i, j;
2573  enum rtx_code code;
2574  const char *format_ptr;
2575
2576  if (x == 0)
2577    return;
2578
2579  code = GET_CODE (x);
2580
2581  /* These types may be freely shared so we needn't do any resetting
2582     for them.  */
2583
2584  switch (code)
2585    {
2586    case REG:
2587    case CONST_INT:
2588    case CONST_DOUBLE:
2589    case CONST_VECTOR:
2590    case SYMBOL_REF:
2591    case CODE_LABEL:
2592    case PC:
2593    case CC0:
2594      return;
2595
2596    case INSN:
2597    case JUMP_INSN:
2598    case CALL_INSN:
2599    case NOTE:
2600    case LABEL_REF:
2601    case BARRIER:
2602      /* The chain of insns is not being copied.  */
2603      return;
2604
2605    default:
2606      break;
2607    }
2608
2609  RTX_FLAG (x, used) = 1;
2610
2611  format_ptr = GET_RTX_FORMAT (code);
2612  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2613    {
2614      switch (*format_ptr++)
2615	{
2616	case 'e':
2617	  set_used_flags (XEXP (x, i));
2618	  break;
2619
2620	case 'E':
2621	  for (j = 0; j < XVECLEN (x, i); j++)
2622	    set_used_flags (XVECEXP (x, i, j));
2623	  break;
2624	}
2625    }
2626}
2627
2628/* Copy X if necessary so that it won't be altered by changes in OTHER.
2629   Return X or the rtx for the pseudo reg the value of X was copied into.
2630   OTHER must be valid as a SET_DEST.  */
2631
2632rtx
2633make_safe_from (rtx x, rtx other)
2634{
2635  while (1)
2636    switch (GET_CODE (other))
2637      {
2638      case SUBREG:
2639	other = SUBREG_REG (other);
2640	break;
2641      case STRICT_LOW_PART:
2642      case SIGN_EXTEND:
2643      case ZERO_EXTEND:
2644	other = XEXP (other, 0);
2645	break;
2646      default:
2647	goto done;
2648      }
2649 done:
2650  if ((MEM_P (other)
2651       && ! CONSTANT_P (x)
2652       && !REG_P (x)
2653       && GET_CODE (x) != SUBREG)
2654      || (REG_P (other)
2655	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
2656	      || reg_mentioned_p (other, x))))
2657    {
2658      rtx temp = gen_reg_rtx (GET_MODE (x));
2659      emit_move_insn (temp, x);
2660      return temp;
2661    }
2662  return x;
2663}
2664
2665/* Emission of insns (adding them to the doubly-linked list).  */
2666
2667/* Return the first insn of the current sequence or current function.  */
2668
2669rtx
2670get_insns (void)
2671{
2672  return first_insn;
2673}
2674
2675/* Specify a new insn as the first in the chain.  */
2676
2677void
2678set_first_insn (rtx insn)
2679{
2680  gcc_assert (!PREV_INSN (insn));
2681  first_insn = insn;
2682}
2683
2684/* Return the last insn emitted in current sequence or current function.  */
2685
2686rtx
2687get_last_insn (void)
2688{
2689  return last_insn;
2690}
2691
2692/* Specify a new insn as the last in the chain.  */
2693
2694void
2695set_last_insn (rtx insn)
2696{
2697  gcc_assert (!NEXT_INSN (insn));
2698  last_insn = insn;
2699}
2700
2701/* Return the last insn emitted, even if it is in a sequence now pushed.  */
2702
2703rtx
2704get_last_insn_anywhere (void)
2705{
2706  struct sequence_stack *stack;
2707  if (last_insn)
2708    return last_insn;
2709  for (stack = seq_stack; stack; stack = stack->next)
2710    if (stack->last != 0)
2711      return stack->last;
2712  return 0;
2713}
2714
2715/* Return the first nonnote insn emitted in current sequence or current
2716   function.  This routine looks inside SEQUENCEs.  */
2717
2718rtx
2719get_first_nonnote_insn (void)
2720{
2721  rtx insn = first_insn;
2722
2723  if (insn)
2724    {
2725      if (NOTE_P (insn))
2726	for (insn = next_insn (insn);
2727	     insn && NOTE_P (insn);
2728	     insn = next_insn (insn))
2729	  continue;
2730      else
2731	{
2732	  if (NONJUMP_INSN_P (insn)
2733	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2734	    insn = XVECEXP (PATTERN (insn), 0, 0);
2735	}
2736    }
2737
2738  return insn;
2739}
2740
2741/* Return the last nonnote insn emitted in current sequence or current
2742   function.  This routine looks inside SEQUENCEs.  */
2743
2744rtx
2745get_last_nonnote_insn (void)
2746{
2747  rtx insn = last_insn;
2748
2749  if (insn)
2750    {
2751      if (NOTE_P (insn))
2752	for (insn = previous_insn (insn);
2753	     insn && NOTE_P (insn);
2754	     insn = previous_insn (insn))
2755	  continue;
2756      else
2757	{
2758	  if (NONJUMP_INSN_P (insn)
2759	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2760	    insn = XVECEXP (PATTERN (insn), 0,
2761			    XVECLEN (PATTERN (insn), 0) - 1);
2762	}
2763    }
2764
2765  return insn;
2766}
2767
2768/* Return a number larger than any instruction's uid in this function.  */
2769
2770int
2771get_max_uid (void)
2772{
2773  return cur_insn_uid;
2774}
2775
2776/* Renumber instructions so that no instruction UIDs are wasted.  */
2777
2778void
2779renumber_insns (void)
2780{
2781  rtx insn;
2782
2783  /* If we're not supposed to renumber instructions, don't.  */
2784  if (!flag_renumber_insns)
2785    return;
2786
2787  /* If there aren't that many instructions, then it's not really
2788     worth renumbering them.  */
2789  if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2790    return;
2791
2792  cur_insn_uid = 1;
2793
2794  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2795    {
2796      if (dump_file)
2797	fprintf (dump_file, "Renumbering insn %d to %d\n",
2798		 INSN_UID (insn), cur_insn_uid);
2799      INSN_UID (insn) = cur_insn_uid++;
2800    }
2801}
2802
2803/* Return the next insn.  If it is a SEQUENCE, return the first insn
2804   of the sequence.  */
2805
2806rtx
2807next_insn (rtx insn)
2808{
2809  if (insn)
2810    {
2811      insn = NEXT_INSN (insn);
2812      if (insn && NONJUMP_INSN_P (insn)
2813	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
2814	insn = XVECEXP (PATTERN (insn), 0, 0);
2815    }
2816
2817  return insn;
2818}
2819
2820/* Return the previous insn.  If it is a SEQUENCE, return the last insn
2821   of the sequence.  */
2822
2823rtx
2824previous_insn (rtx insn)
2825{
2826  if (insn)
2827    {
2828      insn = PREV_INSN (insn);
2829      if (insn && NONJUMP_INSN_P (insn)
2830	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
2831	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2832    }
2833
2834  return insn;
2835}
2836
2837/* Return the next insn after INSN that is not a NOTE.  This routine does not
2838   look inside SEQUENCEs.  */
2839
2840rtx
2841next_nonnote_insn (rtx insn)
2842{
2843  while (insn)
2844    {
2845      insn = NEXT_INSN (insn);
2846      if (insn == 0 || !NOTE_P (insn))
2847	break;
2848    }
2849
2850  return insn;
2851}
2852
2853/* Return the previous insn before INSN that is not a NOTE.  This routine does
2854   not look inside SEQUENCEs.  */
2855
2856rtx
2857prev_nonnote_insn (rtx insn)
2858{
2859  while (insn)
2860    {
2861      insn = PREV_INSN (insn);
2862      if (insn == 0 || !NOTE_P (insn))
2863	break;
2864    }
2865
2866  return insn;
2867}
2868
2869/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2870   or 0, if there is none.  This routine does not look inside
2871   SEQUENCEs.  */
2872
2873rtx
2874next_real_insn (rtx insn)
2875{
2876  while (insn)
2877    {
2878      insn = NEXT_INSN (insn);
2879      if (insn == 0 || INSN_P (insn))
2880	break;
2881    }
2882
2883  return insn;
2884}
2885
2886/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2887   or 0, if there is none.  This routine does not look inside
2888   SEQUENCEs.  */
2889
2890rtx
2891prev_real_insn (rtx insn)
2892{
2893  while (insn)
2894    {
2895      insn = PREV_INSN (insn);
2896      if (insn == 0 || INSN_P (insn))
2897	break;
2898    }
2899
2900  return insn;
2901}
2902
2903/* Return the last CALL_INSN in the current list, or 0 if there is none.
2904   This routine does not look inside SEQUENCEs.  */
2905
2906rtx
2907last_call_insn (void)
2908{
2909  rtx insn;
2910
2911  for (insn = get_last_insn ();
2912       insn && !CALL_P (insn);
2913       insn = PREV_INSN (insn))
2914    ;
2915
2916  return insn;
2917}
2918
2919/* Find the next insn after INSN that really does something.  This routine
2920   does not look inside SEQUENCEs.  Until reload has completed, this is the
2921   same as next_real_insn.  */
2922
2923int
2924active_insn_p (rtx insn)
2925{
2926  return (CALL_P (insn) || JUMP_P (insn)
2927	  || (NONJUMP_INSN_P (insn)
2928	      && (! reload_completed
2929		  || (GET_CODE (PATTERN (insn)) != USE
2930		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
2931}
2932
2933rtx
2934next_active_insn (rtx insn)
2935{
2936  while (insn)
2937    {
2938      insn = NEXT_INSN (insn);
2939      if (insn == 0 || active_insn_p (insn))
2940	break;
2941    }
2942
2943  return insn;
2944}
2945
2946/* Find the last insn before INSN that really does something.  This routine
2947   does not look inside SEQUENCEs.  Until reload has completed, this is the
2948   same as prev_real_insn.  */
2949
2950rtx
2951prev_active_insn (rtx insn)
2952{
2953  while (insn)
2954    {
2955      insn = PREV_INSN (insn);
2956      if (insn == 0 || active_insn_p (insn))
2957	break;
2958    }
2959
2960  return insn;
2961}
2962
2963/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
2964
2965rtx
2966next_label (rtx insn)
2967{
2968  while (insn)
2969    {
2970      insn = NEXT_INSN (insn);
2971      if (insn == 0 || LABEL_P (insn))
2972	break;
2973    }
2974
2975  return insn;
2976}
2977
2978/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */
2979
2980rtx
2981prev_label (rtx insn)
2982{
2983  while (insn)
2984    {
2985      insn = PREV_INSN (insn);
2986      if (insn == 0 || LABEL_P (insn))
2987	break;
2988    }
2989
2990  return insn;
2991}
2992
2993/* Return the last label to mark the same position as LABEL.  Return null
2994   if LABEL itself is null.  */
2995
2996rtx
2997skip_consecutive_labels (rtx label)
2998{
2999  rtx insn;
3000
3001  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3002    if (LABEL_P (insn))
3003      label = insn;
3004
3005  return label;
3006}
3007
3008#ifdef HAVE_cc0
3009/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3010   and REG_CC_USER notes so we can find it.  */
3011
3012void
3013link_cc0_insns (rtx insn)
3014{
3015  rtx user = next_nonnote_insn (insn);
3016
3017  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3018    user = XVECEXP (PATTERN (user), 0, 0);
3019
3020  REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3021					REG_NOTES (user));
3022  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3023}
3024
3025/* Return the next insn that uses CC0 after INSN, which is assumed to
3026   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3027   applied to the result of this function should yield INSN).
3028
3029   Normally, this is simply the next insn.  However, if a REG_CC_USER note
3030   is present, it contains the insn that uses CC0.
3031
3032   Return 0 if we can't find the insn.  */
3033
3034rtx
3035next_cc0_user (rtx insn)
3036{
3037  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3038
3039  if (note)
3040    return XEXP (note, 0);
3041
3042  insn = next_nonnote_insn (insn);
3043  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3044    insn = XVECEXP (PATTERN (insn), 0, 0);
3045
3046  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3047    return insn;
3048
3049  return 0;
3050}
3051
3052/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3053   note, it is the previous insn.  */
3054
3055rtx
3056prev_cc0_setter (rtx insn)
3057{
3058  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3059
3060  if (note)
3061    return XEXP (note, 0);
3062
3063  insn = prev_nonnote_insn (insn);
3064  gcc_assert (sets_cc0_p (PATTERN (insn)));
3065
3066  return insn;
3067}
3068#endif
3069
3070/* Increment the label uses for all labels present in rtx.  */
3071
3072static void
3073mark_label_nuses (rtx x)
3074{
3075  enum rtx_code code;
3076  int i, j;
3077  const char *fmt;
3078
3079  code = GET_CODE (x);
3080  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3081    LABEL_NUSES (XEXP (x, 0))++;
3082
3083  fmt = GET_RTX_FORMAT (code);
3084  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3085    {
3086      if (fmt[i] == 'e')
3087	mark_label_nuses (XEXP (x, i));
3088      else if (fmt[i] == 'E')
3089	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3090	  mark_label_nuses (XVECEXP (x, i, j));
3091    }
3092}
3093
3094
3095/* Try splitting insns that can be split for better scheduling.
3096   PAT is the pattern which might split.
3097   TRIAL is the insn providing PAT.
3098   LAST is nonzero if we should return the last insn of the sequence produced.
3099
3100   If this routine succeeds in splitting, it returns the first or last
3101   replacement insn depending on the value of LAST.  Otherwise, it
3102   returns TRIAL.  If the insn to be returned can be split, it will be.  */
3103
3104rtx
3105try_split (rtx pat, rtx trial, int last)
3106{
3107  rtx before = PREV_INSN (trial);
3108  rtx after = NEXT_INSN (trial);
3109  int has_barrier = 0;
3110  rtx tem;
3111  rtx note, seq;
3112  int probability;
3113  rtx insn_last, insn;
3114  int njumps = 0;
3115
3116  if (any_condjump_p (trial)
3117      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3118    split_branch_probability = INTVAL (XEXP (note, 0));
3119  probability = split_branch_probability;
3120
3121  seq = split_insns (pat, trial);
3122
3123  split_branch_probability = -1;
3124
3125  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3126     We may need to handle this specially.  */
3127  if (after && BARRIER_P (after))
3128    {
3129      has_barrier = 1;
3130      after = NEXT_INSN (after);
3131    }
3132
3133  if (!seq)
3134    return trial;
3135
3136  /* Avoid infinite loop if any insn of the result matches
3137     the original pattern.  */
3138  insn_last = seq;
3139  while (1)
3140    {
3141      if (INSN_P (insn_last)
3142	  && rtx_equal_p (PATTERN (insn_last), pat))
3143	return trial;
3144      if (!NEXT_INSN (insn_last))
3145	break;
3146      insn_last = NEXT_INSN (insn_last);
3147    }
3148
3149  /* Mark labels.  */
3150  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3151    {
3152      if (JUMP_P (insn))
3153	{
3154	  mark_jump_label (PATTERN (insn), insn, 0);
3155	  njumps++;
3156	  if (probability != -1
3157	      && any_condjump_p (insn)
3158	      && !find_reg_note (insn, REG_BR_PROB, 0))
3159	    {
3160	      /* We can preserve the REG_BR_PROB notes only if exactly
3161		 one jump is created, otherwise the machine description
3162		 is responsible for this step using
3163		 split_branch_probability variable.  */
3164	      gcc_assert (njumps == 1);
3165	      REG_NOTES (insn)
3166		= gen_rtx_EXPR_LIST (REG_BR_PROB,
3167				     GEN_INT (probability),
3168				     REG_NOTES (insn));
3169	    }
3170	}
3171    }
3172
3173  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3174     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
3175  if (CALL_P (trial))
3176    {
3177      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3178	if (CALL_P (insn))
3179	  {
3180	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3181	    while (*p)
3182	      p = &XEXP (*p, 1);
3183	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3184	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3185	  }
3186    }
3187
3188  /* Copy notes, particularly those related to the CFG.  */
3189  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3190    {
3191      switch (REG_NOTE_KIND (note))
3192	{
3193	case REG_EH_REGION:
3194	  insn = insn_last;
3195	  while (insn != NULL_RTX)
3196	    {
3197	      if (CALL_P (insn)
3198		  || (flag_non_call_exceptions && INSN_P (insn)
3199		      && may_trap_p (PATTERN (insn))))
3200		REG_NOTES (insn)
3201		  = gen_rtx_EXPR_LIST (REG_EH_REGION,
3202				       XEXP (note, 0),
3203				       REG_NOTES (insn));
3204	      insn = PREV_INSN (insn);
3205	    }
3206	  break;
3207
3208	case REG_NORETURN:
3209	case REG_SETJMP:
3210	  insn = insn_last;
3211	  while (insn != NULL_RTX)
3212	    {
3213	      if (CALL_P (insn))
3214		REG_NOTES (insn)
3215		  = gen_rtx_EXPR_LIST (GET_MODE (note),
3216				       XEXP (note, 0),
3217				       REG_NOTES (insn));
3218	      insn = PREV_INSN (insn);
3219	    }
3220	  break;
3221
3222	case REG_NON_LOCAL_GOTO:
3223	  insn = insn_last;
3224	  while (insn != NULL_RTX)
3225	    {
3226	      if (JUMP_P (insn))
3227		REG_NOTES (insn)
3228		  = gen_rtx_EXPR_LIST (GET_MODE (note),
3229				       XEXP (note, 0),
3230				       REG_NOTES (insn));
3231	      insn = PREV_INSN (insn);
3232	    }
3233	  break;
3234
3235	default:
3236	  break;
3237	}
3238    }
3239
3240  /* If there are LABELS inside the split insns increment the
3241     usage count so we don't delete the label.  */
3242  if (NONJUMP_INSN_P (trial))
3243    {
3244      insn = insn_last;
3245      while (insn != NULL_RTX)
3246	{
3247	  if (NONJUMP_INSN_P (insn))
3248	    mark_label_nuses (PATTERN (insn));
3249
3250	  insn = PREV_INSN (insn);
3251	}
3252    }
3253
3254  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3255
3256  delete_insn (trial);
3257  if (has_barrier)
3258    emit_barrier_after (tem);
3259
3260  /* Recursively call try_split for each new insn created; by the
3261     time control returns here that insn will be fully split, so
3262     set LAST and continue from the insn after the one returned.
3263     We can't use next_active_insn here since AFTER may be a note.
3264     Ignore deleted insns, which can be occur if not optimizing.  */
3265  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3266    if (! INSN_DELETED_P (tem) && INSN_P (tem))
3267      tem = try_split (PATTERN (tem), tem, 1);
3268
3269  /* Return either the first or the last insn, depending on which was
3270     requested.  */
3271  return last
3272    ? (after ? PREV_INSN (after) : last_insn)
3273    : NEXT_INSN (before);
3274}
3275
3276/* Make and return an INSN rtx, initializing all its slots.
3277   Store PATTERN in the pattern slots.  */
3278
3279rtx
3280make_insn_raw (rtx pattern)
3281{
3282  rtx insn;
3283
3284  insn = rtx_alloc (INSN);
3285
3286  INSN_UID (insn) = cur_insn_uid++;
3287  PATTERN (insn) = pattern;
3288  INSN_CODE (insn) = -1;
3289  LOG_LINKS (insn) = NULL;
3290  REG_NOTES (insn) = NULL;
3291  INSN_LOCATOR (insn) = 0;
3292  BLOCK_FOR_INSN (insn) = NULL;
3293
3294#ifdef ENABLE_RTL_CHECKING
3295  if (insn
3296      && INSN_P (insn)
3297      && (returnjump_p (insn)
3298	  || (GET_CODE (insn) == SET
3299	      && SET_DEST (insn) == pc_rtx)))
3300    {
3301      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3302      debug_rtx (insn);
3303    }
3304#endif
3305
3306  return insn;
3307}
3308
3309/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3310
3311rtx
3312make_jump_insn_raw (rtx pattern)
3313{
3314  rtx insn;
3315
3316  insn = rtx_alloc (JUMP_INSN);
3317  INSN_UID (insn) = cur_insn_uid++;
3318
3319  PATTERN (insn) = pattern;
3320  INSN_CODE (insn) = -1;
3321  LOG_LINKS (insn) = NULL;
3322  REG_NOTES (insn) = NULL;
3323  JUMP_LABEL (insn) = NULL;
3324  INSN_LOCATOR (insn) = 0;
3325  BLOCK_FOR_INSN (insn) = NULL;
3326
3327  return insn;
3328}
3329
3330/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3331
3332static rtx
3333make_call_insn_raw (rtx pattern)
3334{
3335  rtx insn;
3336
3337  insn = rtx_alloc (CALL_INSN);
3338  INSN_UID (insn) = cur_insn_uid++;
3339
3340  PATTERN (insn) = pattern;
3341  INSN_CODE (insn) = -1;
3342  LOG_LINKS (insn) = NULL;
3343  REG_NOTES (insn) = NULL;
3344  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3345  INSN_LOCATOR (insn) = 0;
3346  BLOCK_FOR_INSN (insn) = NULL;
3347
3348  return insn;
3349}
3350
3351/* Add INSN to the end of the doubly-linked list.
3352   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3353
3354void
3355add_insn (rtx insn)
3356{
3357  PREV_INSN (insn) = last_insn;
3358  NEXT_INSN (insn) = 0;
3359
3360  if (NULL != last_insn)
3361    NEXT_INSN (last_insn) = insn;
3362
3363  if (NULL == first_insn)
3364    first_insn = insn;
3365
3366  last_insn = insn;
3367}
3368
3369/* Add INSN into the doubly-linked list after insn AFTER.  This and
3370   the next should be the only functions called to insert an insn once
3371   delay slots have been filled since only they know how to update a
3372   SEQUENCE.  */
3373
3374void
3375add_insn_after (rtx insn, rtx after)
3376{
3377  rtx next = NEXT_INSN (after);
3378  basic_block bb;
3379
3380  gcc_assert (!optimize || !INSN_DELETED_P (after));
3381
3382  NEXT_INSN (insn) = next;
3383  PREV_INSN (insn) = after;
3384
3385  if (next)
3386    {
3387      PREV_INSN (next) = insn;
3388      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3389	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3390    }
3391  else if (last_insn == after)
3392    last_insn = insn;
3393  else
3394    {
3395      struct sequence_stack *stack = seq_stack;
3396      /* Scan all pending sequences too.  */
3397      for (; stack; stack = stack->next)
3398	if (after == stack->last)
3399	  {
3400	    stack->last = insn;
3401	    break;
3402	  }
3403
3404      gcc_assert (stack);
3405    }
3406
3407  if (!BARRIER_P (after)
3408      && !BARRIER_P (insn)
3409      && (bb = BLOCK_FOR_INSN (after)))
3410    {
3411      set_block_for_insn (insn, bb);
3412      if (INSN_P (insn))
3413	bb->flags |= BB_DIRTY;
3414      /* Should not happen as first in the BB is always
3415	 either NOTE or LABEL.  */
3416      if (BB_END (bb) == after
3417	  /* Avoid clobbering of structure when creating new BB.  */
3418	  && !BARRIER_P (insn)
3419	  && (!NOTE_P (insn)
3420	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3421	BB_END (bb) = insn;
3422    }
3423
3424  NEXT_INSN (after) = insn;
3425  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3426    {
3427      rtx sequence = PATTERN (after);
3428      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3429    }
3430}
3431
3432/* Add INSN into the doubly-linked list before insn BEFORE.  This and
3433   the previous should be the only functions called to insert an insn once
3434   delay slots have been filled since only they know how to update a
3435   SEQUENCE.  */
3436
3437void
3438add_insn_before (rtx insn, rtx before)
3439{
3440  rtx prev = PREV_INSN (before);
3441  basic_block bb;
3442
3443  gcc_assert (!optimize || !INSN_DELETED_P (before));
3444
3445  PREV_INSN (insn) = prev;
3446  NEXT_INSN (insn) = before;
3447
3448  if (prev)
3449    {
3450      NEXT_INSN (prev) = insn;
3451      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3452	{
3453	  rtx sequence = PATTERN (prev);
3454	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3455	}
3456    }
3457  else if (first_insn == before)
3458    first_insn = insn;
3459  else
3460    {
3461      struct sequence_stack *stack = seq_stack;
3462      /* Scan all pending sequences too.  */
3463      for (; stack; stack = stack->next)
3464	if (before == stack->first)
3465	  {
3466	    stack->first = insn;
3467	    break;
3468	  }
3469
3470      gcc_assert (stack);
3471    }
3472
3473  if (!BARRIER_P (before)
3474      && !BARRIER_P (insn)
3475      && (bb = BLOCK_FOR_INSN (before)))
3476    {
3477      set_block_for_insn (insn, bb);
3478      if (INSN_P (insn))
3479	bb->flags |= BB_DIRTY;
3480      /* Should not happen as first in the BB is always either NOTE or
3481	 LABEL.  */
3482      gcc_assert (BB_HEAD (bb) != insn
3483		  /* Avoid clobbering of structure when creating new BB.  */
3484		  || BARRIER_P (insn)
3485		  || (NOTE_P (insn)
3486		      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3487    }
3488
3489  PREV_INSN (before) = insn;
3490  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3491    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3492}
3493
3494/* Remove an insn from its doubly-linked list.  This function knows how
3495   to handle sequences.  */
3496void
3497remove_insn (rtx insn)
3498{
3499  rtx next = NEXT_INSN (insn);
3500  rtx prev = PREV_INSN (insn);
3501  basic_block bb;
3502
3503  if (prev)
3504    {
3505      NEXT_INSN (prev) = next;
3506      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3507	{
3508	  rtx sequence = PATTERN (prev);
3509	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3510	}
3511    }
3512  else if (first_insn == insn)
3513    first_insn = next;
3514  else
3515    {
3516      struct sequence_stack *stack = seq_stack;
3517      /* Scan all pending sequences too.  */
3518      for (; stack; stack = stack->next)
3519	if (insn == stack->first)
3520	  {
3521	    stack->first = next;
3522	    break;
3523	  }
3524
3525      gcc_assert (stack);
3526    }
3527
3528  if (next)
3529    {
3530      PREV_INSN (next) = prev;
3531      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3532	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3533    }
3534  else if (last_insn == insn)
3535    last_insn = prev;
3536  else
3537    {
3538      struct sequence_stack *stack = seq_stack;
3539      /* Scan all pending sequences too.  */
3540      for (; stack; stack = stack->next)
3541	if (insn == stack->last)
3542	  {
3543	    stack->last = prev;
3544	    break;
3545	  }
3546
3547      gcc_assert (stack);
3548    }
3549  if (!BARRIER_P (insn)
3550      && (bb = BLOCK_FOR_INSN (insn)))
3551    {
3552      if (INSN_P (insn))
3553	bb->flags |= BB_DIRTY;
3554      if (BB_HEAD (bb) == insn)
3555	{
3556	  /* Never ever delete the basic block note without deleting whole
3557	     basic block.  */
3558	  gcc_assert (!NOTE_P (insn));
3559	  BB_HEAD (bb) = next;
3560	}
3561      if (BB_END (bb) == insn)
3562	BB_END (bb) = prev;
3563    }
3564}
3565
3566/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
3567
3568void
3569add_function_usage_to (rtx call_insn, rtx call_fusage)
3570{
3571  gcc_assert (call_insn && CALL_P (call_insn));
3572
3573  /* Put the register usage information on the CALL.  If there is already
3574     some usage information, put ours at the end.  */
3575  if (CALL_INSN_FUNCTION_USAGE (call_insn))
3576    {
3577      rtx link;
3578
3579      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3580	   link = XEXP (link, 1))
3581	;
3582
3583      XEXP (link, 1) = call_fusage;
3584    }
3585  else
3586    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3587}
3588
3589/* Delete all insns made since FROM.
3590   FROM becomes the new last instruction.  */
3591
3592void
3593delete_insns_since (rtx from)
3594{
3595  if (from == 0)
3596    first_insn = 0;
3597  else
3598    NEXT_INSN (from) = 0;
3599  last_insn = from;
3600}
3601
3602/* This function is deprecated, please use sequences instead.
3603
3604   Move a consecutive bunch of insns to a different place in the chain.
3605   The insns to be moved are those between FROM and TO.
3606   They are moved to a new position after the insn AFTER.
3607   AFTER must not be FROM or TO or any insn in between.
3608
3609   This function does not know about SEQUENCEs and hence should not be
3610   called after delay-slot filling has been done.  */
3611
3612void
3613reorder_insns_nobb (rtx from, rtx to, rtx after)
3614{
3615  /* Splice this bunch out of where it is now.  */
3616  if (PREV_INSN (from))
3617    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3618  if (NEXT_INSN (to))
3619    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3620  if (last_insn == to)
3621    last_insn = PREV_INSN (from);
3622  if (first_insn == from)
3623    first_insn = NEXT_INSN (to);
3624
3625  /* Make the new neighbors point to it and it to them.  */
3626  if (NEXT_INSN (after))
3627    PREV_INSN (NEXT_INSN (after)) = to;
3628
3629  NEXT_INSN (to) = NEXT_INSN (after);
3630  PREV_INSN (from) = after;
3631  NEXT_INSN (after) = from;
3632  if (after == last_insn)
3633    last_insn = to;
3634}
3635
3636/* Same as function above, but take care to update BB boundaries.  */
3637void
3638reorder_insns (rtx from, rtx to, rtx after)
3639{
3640  rtx prev = PREV_INSN (from);
3641  basic_block bb, bb2;
3642
3643  reorder_insns_nobb (from, to, after);
3644
3645  if (!BARRIER_P (after)
3646      && (bb = BLOCK_FOR_INSN (after)))
3647    {
3648      rtx x;
3649      bb->flags |= BB_DIRTY;
3650
3651      if (!BARRIER_P (from)
3652	  && (bb2 = BLOCK_FOR_INSN (from)))
3653	{
3654	  if (BB_END (bb2) == to)
3655	    BB_END (bb2) = prev;
3656	  bb2->flags |= BB_DIRTY;
3657	}
3658
3659      if (BB_END (bb) == after)
3660	BB_END (bb) = to;
3661
3662      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3663	if (!BARRIER_P (x))
3664	  set_block_for_insn (x, bb);
3665    }
3666}
3667
3668/* Return the line note insn preceding INSN.  */
3669
3670static rtx
3671find_line_note (rtx insn)
3672{
3673  if (no_line_numbers)
3674    return 0;
3675
3676  for (; insn; insn = PREV_INSN (insn))
3677    if (NOTE_P (insn)
3678	&& NOTE_LINE_NUMBER (insn) >= 0)
3679      break;
3680
3681  return insn;
3682}
3683
3684
3685/* Emit insn(s) of given code and pattern
3686   at a specified place within the doubly-linked list.
3687
3688   All of the emit_foo global entry points accept an object
3689   X which is either an insn list or a PATTERN of a single
3690   instruction.
3691
3692   There are thus a few canonical ways to generate code and
3693   emit it at a specific place in the instruction stream.  For
3694   example, consider the instruction named SPOT and the fact that
3695   we would like to emit some instructions before SPOT.  We might
3696   do it like this:
3697
3698	start_sequence ();
3699	... emit the new instructions ...
3700	insns_head = get_insns ();
3701	end_sequence ();
3702
3703	emit_insn_before (insns_head, SPOT);
3704
3705   It used to be common to generate SEQUENCE rtl instead, but that
3706   is a relic of the past which no longer occurs.  The reason is that
3707   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
3708   generated would almost certainly die right after it was created.  */
3709
3710/* Make X be output before the instruction BEFORE.  */
3711
3712rtx
3713emit_insn_before_noloc (rtx x, rtx before)
3714{
3715  rtx last = before;
3716  rtx insn;
3717
3718  gcc_assert (before);
3719
3720  if (x == NULL_RTX)
3721    return last;
3722
3723  switch (GET_CODE (x))
3724    {
3725    case INSN:
3726    case JUMP_INSN:
3727    case CALL_INSN:
3728    case CODE_LABEL:
3729    case BARRIER:
3730    case NOTE:
3731      insn = x;
3732      while (insn)
3733	{
3734	  rtx next = NEXT_INSN (insn);
3735	  add_insn_before (insn, before);
3736	  last = insn;
3737	  insn = next;
3738	}
3739      break;
3740
3741#ifdef ENABLE_RTL_CHECKING
3742    case SEQUENCE:
3743      gcc_unreachable ();
3744      break;
3745#endif
3746
3747    default:
3748      last = make_insn_raw (x);
3749      add_insn_before (last, before);
3750      break;
3751    }
3752
3753  return last;
3754}
3755
3756/* Make an instruction with body X and code JUMP_INSN
3757   and output it before the instruction BEFORE.  */
3758
3759rtx
3760emit_jump_insn_before_noloc (rtx x, rtx before)
3761{
3762  rtx insn, last = NULL_RTX;
3763
3764  gcc_assert (before);
3765
3766  switch (GET_CODE (x))
3767    {
3768    case INSN:
3769    case JUMP_INSN:
3770    case CALL_INSN:
3771    case CODE_LABEL:
3772    case BARRIER:
3773    case NOTE:
3774      insn = x;
3775      while (insn)
3776	{
3777	  rtx next = NEXT_INSN (insn);
3778	  add_insn_before (insn, before);
3779	  last = insn;
3780	  insn = next;
3781	}
3782      break;
3783
3784#ifdef ENABLE_RTL_CHECKING
3785    case SEQUENCE:
3786      gcc_unreachable ();
3787      break;
3788#endif
3789
3790    default:
3791      last = make_jump_insn_raw (x);
3792      add_insn_before (last, before);
3793      break;
3794    }
3795
3796  return last;
3797}
3798
3799/* Make an instruction with body X and code CALL_INSN
3800   and output it before the instruction BEFORE.  */
3801
3802rtx
3803emit_call_insn_before_noloc (rtx x, rtx before)
3804{
3805  rtx last = NULL_RTX, insn;
3806
3807  gcc_assert (before);
3808
3809  switch (GET_CODE (x))
3810    {
3811    case INSN:
3812    case JUMP_INSN:
3813    case CALL_INSN:
3814    case CODE_LABEL:
3815    case BARRIER:
3816    case NOTE:
3817      insn = x;
3818      while (insn)
3819	{
3820	  rtx next = NEXT_INSN (insn);
3821	  add_insn_before (insn, before);
3822	  last = insn;
3823	  insn = next;
3824	}
3825      break;
3826
3827#ifdef ENABLE_RTL_CHECKING
3828    case SEQUENCE:
3829      gcc_unreachable ();
3830      break;
3831#endif
3832
3833    default:
3834      last = make_call_insn_raw (x);
3835      add_insn_before (last, before);
3836      break;
3837    }
3838
3839  return last;
3840}
3841
3842/* Make an insn of code BARRIER
3843   and output it before the insn BEFORE.  */
3844
3845rtx
3846emit_barrier_before (rtx before)
3847{
3848  rtx insn = rtx_alloc (BARRIER);
3849
3850  INSN_UID (insn) = cur_insn_uid++;
3851
3852  add_insn_before (insn, before);
3853  return insn;
3854}
3855
3856/* Emit the label LABEL before the insn BEFORE.  */
3857
3858rtx
3859emit_label_before (rtx label, rtx before)
3860{
3861  /* This can be called twice for the same label as a result of the
3862     confusion that follows a syntax error!  So make it harmless.  */
3863  if (INSN_UID (label) == 0)
3864    {
3865      INSN_UID (label) = cur_insn_uid++;
3866      add_insn_before (label, before);
3867    }
3868
3869  return label;
3870}
3871
3872/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
3873
3874rtx
3875emit_note_before (int subtype, rtx before)
3876{
3877  rtx note = rtx_alloc (NOTE);
3878  INSN_UID (note) = cur_insn_uid++;
3879#ifndef USE_MAPPED_LOCATION
3880  NOTE_SOURCE_FILE (note) = 0;
3881#endif
3882  NOTE_LINE_NUMBER (note) = subtype;
3883  BLOCK_FOR_INSN (note) = NULL;
3884
3885  add_insn_before (note, before);
3886  return note;
3887}
3888
3889/* Helper for emit_insn_after, handles lists of instructions
3890   efficiently.  */
3891
3892static rtx emit_insn_after_1 (rtx, rtx);
3893
3894static rtx
3895emit_insn_after_1 (rtx first, rtx after)
3896{
3897  rtx last;
3898  rtx after_after;
3899  basic_block bb;
3900
3901  if (!BARRIER_P (after)
3902      && (bb = BLOCK_FOR_INSN (after)))
3903    {
3904      bb->flags |= BB_DIRTY;
3905      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3906	if (!BARRIER_P (last))
3907	  set_block_for_insn (last, bb);
3908      if (!BARRIER_P (last))
3909	set_block_for_insn (last, bb);
3910      if (BB_END (bb) == after)
3911	BB_END (bb) = last;
3912    }
3913  else
3914    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3915      continue;
3916
3917  after_after = NEXT_INSN (after);
3918
3919  NEXT_INSN (after) = first;
3920  PREV_INSN (first) = after;
3921  NEXT_INSN (last) = after_after;
3922  if (after_after)
3923    PREV_INSN (after_after) = last;
3924
3925  if (after == last_insn)
3926    last_insn = last;
3927  return last;
3928}
3929
3930/* Make X be output after the insn AFTER.  */
3931
3932rtx
3933emit_insn_after_noloc (rtx x, rtx after)
3934{
3935  rtx last = after;
3936
3937  gcc_assert (after);
3938
3939  if (x == NULL_RTX)
3940    return last;
3941
3942  switch (GET_CODE (x))
3943    {
3944    case INSN:
3945    case JUMP_INSN:
3946    case CALL_INSN:
3947    case CODE_LABEL:
3948    case BARRIER:
3949    case NOTE:
3950      last = emit_insn_after_1 (x, after);
3951      break;
3952
3953#ifdef ENABLE_RTL_CHECKING
3954    case SEQUENCE:
3955      gcc_unreachable ();
3956      break;
3957#endif
3958
3959    default:
3960      last = make_insn_raw (x);
3961      add_insn_after (last, after);
3962      break;
3963    }
3964
3965  return last;
3966}
3967
3968/* Similar to emit_insn_after, except that line notes are to be inserted so
3969   as to act as if this insn were at FROM.  */
3970
3971void
3972emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
3973{
3974  rtx from_line = find_line_note (from);
3975  rtx after_line = find_line_note (after);
3976  rtx insn = emit_insn_after (x, after);
3977
3978  if (from_line)
3979    emit_note_copy_after (from_line, after);
3980
3981  if (after_line)
3982    emit_note_copy_after (after_line, insn);
3983}
3984
3985/* Make an insn of code JUMP_INSN with body X
3986   and output it after the insn AFTER.  */
3987
3988rtx
3989emit_jump_insn_after_noloc (rtx x, rtx after)
3990{
3991  rtx last;
3992
3993  gcc_assert (after);
3994
3995  switch (GET_CODE (x))
3996    {
3997    case INSN:
3998    case JUMP_INSN:
3999    case CALL_INSN:
4000    case CODE_LABEL:
4001    case BARRIER:
4002    case NOTE:
4003      last = emit_insn_after_1 (x, after);
4004      break;
4005
4006#ifdef ENABLE_RTL_CHECKING
4007    case SEQUENCE:
4008      gcc_unreachable ();
4009      break;
4010#endif
4011
4012    default:
4013      last = make_jump_insn_raw (x);
4014      add_insn_after (last, after);
4015      break;
4016    }
4017
4018  return last;
4019}
4020
4021/* Make an instruction with body X and code CALL_INSN
4022   and output it after the instruction AFTER.  */
4023
4024rtx
4025emit_call_insn_after_noloc (rtx x, rtx after)
4026{
4027  rtx last;
4028
4029  gcc_assert (after);
4030
4031  switch (GET_CODE (x))
4032    {
4033    case INSN:
4034    case JUMP_INSN:
4035    case CALL_INSN:
4036    case CODE_LABEL:
4037    case BARRIER:
4038    case NOTE:
4039      last = emit_insn_after_1 (x, after);
4040      break;
4041
4042#ifdef ENABLE_RTL_CHECKING
4043    case SEQUENCE:
4044      gcc_unreachable ();
4045      break;
4046#endif
4047
4048    default:
4049      last = make_call_insn_raw (x);
4050      add_insn_after (last, after);
4051      break;
4052    }
4053
4054  return last;
4055}
4056
4057/* Make an insn of code BARRIER
4058   and output it after the insn AFTER.  */
4059
4060rtx
4061emit_barrier_after (rtx after)
4062{
4063  rtx insn = rtx_alloc (BARRIER);
4064
4065  INSN_UID (insn) = cur_insn_uid++;
4066
4067  add_insn_after (insn, after);
4068  return insn;
4069}
4070
4071/* Emit the label LABEL after the insn AFTER.  */
4072
4073rtx
4074emit_label_after (rtx label, rtx after)
4075{
4076  /* This can be called twice for the same label
4077     as a result of the confusion that follows a syntax error!
4078     So make it harmless.  */
4079  if (INSN_UID (label) == 0)
4080    {
4081      INSN_UID (label) = cur_insn_uid++;
4082      add_insn_after (label, after);
4083    }
4084
4085  return label;
4086}
4087
4088/* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4089
4090rtx
4091emit_note_after (int subtype, rtx after)
4092{
4093  rtx note = rtx_alloc (NOTE);
4094  INSN_UID (note) = cur_insn_uid++;
4095#ifndef USE_MAPPED_LOCATION
4096  NOTE_SOURCE_FILE (note) = 0;
4097#endif
4098  NOTE_LINE_NUMBER (note) = subtype;
4099  BLOCK_FOR_INSN (note) = NULL;
4100  add_insn_after (note, after);
4101  return note;
4102}
4103
4104/* Emit a copy of note ORIG after the insn AFTER.  */
4105
4106rtx
4107emit_note_copy_after (rtx orig, rtx after)
4108{
4109  rtx note;
4110
4111  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4112    {
4113      cur_insn_uid++;
4114      return 0;
4115    }
4116
4117  note = rtx_alloc (NOTE);
4118  INSN_UID (note) = cur_insn_uid++;
4119  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4120  NOTE_DATA (note) = NOTE_DATA (orig);
4121  BLOCK_FOR_INSN (note) = NULL;
4122  add_insn_after (note, after);
4123  return note;
4124}
4125
4126/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4127rtx
4128emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4129{
4130  rtx last = emit_insn_after_noloc (pattern, after);
4131
4132  if (pattern == NULL_RTX || !loc)
4133    return last;
4134
4135  after = NEXT_INSN (after);
4136  while (1)
4137    {
4138      if (active_insn_p (after) && !INSN_LOCATOR (after))
4139	INSN_LOCATOR (after) = loc;
4140      if (after == last)
4141	break;
4142      after = NEXT_INSN (after);
4143    }
4144  return last;
4145}
4146
4147/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4148rtx
4149emit_insn_after (rtx pattern, rtx after)
4150{
4151  if (INSN_P (after))
4152    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4153  else
4154    return emit_insn_after_noloc (pattern, after);
4155}
4156
4157/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4158rtx
4159emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4160{
4161  rtx last = emit_jump_insn_after_noloc (pattern, after);
4162
4163  if (pattern == NULL_RTX || !loc)
4164    return last;
4165
4166  after = NEXT_INSN (after);
4167  while (1)
4168    {
4169      if (active_insn_p (after) && !INSN_LOCATOR (after))
4170	INSN_LOCATOR (after) = loc;
4171      if (after == last)
4172	break;
4173      after = NEXT_INSN (after);
4174    }
4175  return last;
4176}
4177
4178/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4179rtx
4180emit_jump_insn_after (rtx pattern, rtx after)
4181{
4182  if (INSN_P (after))
4183    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4184  else
4185    return emit_jump_insn_after_noloc (pattern, after);
4186}
4187
4188/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4189rtx
4190emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4191{
4192  rtx last = emit_call_insn_after_noloc (pattern, after);
4193
4194  if (pattern == NULL_RTX || !loc)
4195    return last;
4196
4197  after = NEXT_INSN (after);
4198  while (1)
4199    {
4200      if (active_insn_p (after) && !INSN_LOCATOR (after))
4201	INSN_LOCATOR (after) = loc;
4202      if (after == last)
4203	break;
4204      after = NEXT_INSN (after);
4205    }
4206  return last;
4207}
4208
4209/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4210rtx
4211emit_call_insn_after (rtx pattern, rtx after)
4212{
4213  if (INSN_P (after))
4214    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4215  else
4216    return emit_call_insn_after_noloc (pattern, after);
4217}
4218
4219/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4220rtx
4221emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4222{
4223  rtx first = PREV_INSN (before);
4224  rtx last = emit_insn_before_noloc (pattern, before);
4225
4226  if (pattern == NULL_RTX || !loc)
4227    return last;
4228
4229  first = NEXT_INSN (first);
4230  while (1)
4231    {
4232      if (active_insn_p (first) && !INSN_LOCATOR (first))
4233	INSN_LOCATOR (first) = loc;
4234      if (first == last)
4235	break;
4236      first = NEXT_INSN (first);
4237    }
4238  return last;
4239}
4240
4241/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4242rtx
4243emit_insn_before (rtx pattern, rtx before)
4244{
4245  if (INSN_P (before))
4246    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4247  else
4248    return emit_insn_before_noloc (pattern, before);
4249}
4250
4251/* like emit_insn_before_noloc, but set insn_locator according to scope.  */
4252rtx
4253emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4254{
4255  rtx first = PREV_INSN (before);
4256  rtx last = emit_jump_insn_before_noloc (pattern, before);
4257
4258  if (pattern == NULL_RTX)
4259    return last;
4260
4261  first = NEXT_INSN (first);
4262  while (1)
4263    {
4264      if (active_insn_p (first) && !INSN_LOCATOR (first))
4265	INSN_LOCATOR (first) = loc;
4266      if (first == last)
4267	break;
4268      first = NEXT_INSN (first);
4269    }
4270  return last;
4271}
4272
4273/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4274rtx
4275emit_jump_insn_before (rtx pattern, rtx before)
4276{
4277  if (INSN_P (before))
4278    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4279  else
4280    return emit_jump_insn_before_noloc (pattern, before);
4281}
4282
4283/* like emit_insn_before_noloc, but set insn_locator according to scope.  */
4284rtx
4285emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4286{
4287  rtx first = PREV_INSN (before);
4288  rtx last = emit_call_insn_before_noloc (pattern, before);
4289
4290  if (pattern == NULL_RTX)
4291    return last;
4292
4293  first = NEXT_INSN (first);
4294  while (1)
4295    {
4296      if (active_insn_p (first) && !INSN_LOCATOR (first))
4297	INSN_LOCATOR (first) = loc;
4298      if (first == last)
4299	break;
4300      first = NEXT_INSN (first);
4301    }
4302  return last;
4303}
4304
4305/* like emit_call_insn_before_noloc,
4306   but set insn_locator according to before.  */
4307rtx
4308emit_call_insn_before (rtx pattern, rtx before)
4309{
4310  if (INSN_P (before))
4311    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4312  else
4313    return emit_call_insn_before_noloc (pattern, before);
4314}
4315
4316/* Take X and emit it at the end of the doubly-linked
4317   INSN list.
4318
4319   Returns the last insn emitted.  */
4320
4321rtx
4322emit_insn (rtx x)
4323{
4324  rtx last = last_insn;
4325  rtx insn;
4326
4327  if (x == NULL_RTX)
4328    return last;
4329
4330  switch (GET_CODE (x))
4331    {
4332    case INSN:
4333    case JUMP_INSN:
4334    case CALL_INSN:
4335    case CODE_LABEL:
4336    case BARRIER:
4337    case NOTE:
4338      insn = x;
4339      while (insn)
4340	{
4341	  rtx next = NEXT_INSN (insn);
4342	  add_insn (insn);
4343	  last = insn;
4344	  insn = next;
4345	}
4346      break;
4347
4348#ifdef ENABLE_RTL_CHECKING
4349    case SEQUENCE:
4350      gcc_unreachable ();
4351      break;
4352#endif
4353
4354    default:
4355      last = make_insn_raw (x);
4356      add_insn (last);
4357      break;
4358    }
4359
4360  return last;
4361}
4362
4363/* Make an insn of code JUMP_INSN with pattern X
4364   and add it to the end of the doubly-linked list.  */
4365
4366rtx
4367emit_jump_insn (rtx x)
4368{
4369  rtx last = NULL_RTX, insn;
4370
4371  switch (GET_CODE (x))
4372    {
4373    case INSN:
4374    case JUMP_INSN:
4375    case CALL_INSN:
4376    case CODE_LABEL:
4377    case BARRIER:
4378    case NOTE:
4379      insn = x;
4380      while (insn)
4381	{
4382	  rtx next = NEXT_INSN (insn);
4383	  add_insn (insn);
4384	  last = insn;
4385	  insn = next;
4386	}
4387      break;
4388
4389#ifdef ENABLE_RTL_CHECKING
4390    case SEQUENCE:
4391      gcc_unreachable ();
4392      break;
4393#endif
4394
4395    default:
4396      last = make_jump_insn_raw (x);
4397      add_insn (last);
4398      break;
4399    }
4400
4401  return last;
4402}
4403
4404/* Make an insn of code CALL_INSN with pattern X
4405   and add it to the end of the doubly-linked list.  */
4406
4407rtx
4408emit_call_insn (rtx x)
4409{
4410  rtx insn;
4411
4412  switch (GET_CODE (x))
4413    {
4414    case INSN:
4415    case JUMP_INSN:
4416    case CALL_INSN:
4417    case CODE_LABEL:
4418    case BARRIER:
4419    case NOTE:
4420      insn = emit_insn (x);
4421      break;
4422
4423#ifdef ENABLE_RTL_CHECKING
4424    case SEQUENCE:
4425      gcc_unreachable ();
4426      break;
4427#endif
4428
4429    default:
4430      insn = make_call_insn_raw (x);
4431      add_insn (insn);
4432      break;
4433    }
4434
4435  return insn;
4436}
4437
4438/* Add the label LABEL to the end of the doubly-linked list.  */
4439
4440rtx
4441emit_label (rtx label)
4442{
4443  /* This can be called twice for the same label
4444     as a result of the confusion that follows a syntax error!
4445     So make it harmless.  */
4446  if (INSN_UID (label) == 0)
4447    {
4448      INSN_UID (label) = cur_insn_uid++;
4449      add_insn (label);
4450    }
4451  return label;
4452}
4453
4454/* Make an insn of code BARRIER
4455   and add it to the end of the doubly-linked list.  */
4456
4457rtx
4458emit_barrier (void)
4459{
4460  rtx barrier = rtx_alloc (BARRIER);
4461  INSN_UID (barrier) = cur_insn_uid++;
4462  add_insn (barrier);
4463  return barrier;
4464}
4465
4466/* Make line numbering NOTE insn for LOCATION add it to the end
4467   of the doubly-linked list, but only if line-numbers are desired for
4468   debugging info and it doesn't match the previous one.  */
4469
4470rtx
4471emit_line_note (location_t location)
4472{
4473  rtx note;
4474
4475#ifdef USE_MAPPED_LOCATION
4476  if (location == last_location)
4477    return NULL_RTX;
4478#else
4479  if (location.file && last_location.file
4480      && !strcmp (location.file, last_location.file)
4481      && location.line == last_location.line)
4482    return NULL_RTX;
4483#endif
4484  last_location = location;
4485
4486  if (no_line_numbers)
4487    {
4488      cur_insn_uid++;
4489      return NULL_RTX;
4490    }
4491
4492#ifdef USE_MAPPED_LOCATION
4493  note = emit_note ((int) location);
4494#else
4495  note = emit_note (location.line);
4496  NOTE_SOURCE_FILE (note) = location.file;
4497#endif
4498
4499  return note;
4500}
4501
4502/* Emit a copy of note ORIG.  */
4503
4504rtx
4505emit_note_copy (rtx orig)
4506{
4507  rtx note;
4508
4509  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4510    {
4511      cur_insn_uid++;
4512      return NULL_RTX;
4513    }
4514
4515  note = rtx_alloc (NOTE);
4516
4517  INSN_UID (note) = cur_insn_uid++;
4518  NOTE_DATA (note) = NOTE_DATA (orig);
4519  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4520  BLOCK_FOR_INSN (note) = NULL;
4521  add_insn (note);
4522
4523  return note;
4524}
4525
4526/* Make an insn of code NOTE or type NOTE_NO
4527   and add it to the end of the doubly-linked list.  */
4528
4529rtx
4530emit_note (int note_no)
4531{
4532  rtx note;
4533
4534  note = rtx_alloc (NOTE);
4535  INSN_UID (note) = cur_insn_uid++;
4536  NOTE_LINE_NUMBER (note) = note_no;
4537  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4538  BLOCK_FOR_INSN (note) = NULL;
4539  add_insn (note);
4540  return note;
4541}
4542
4543/* Cause next statement to emit a line note even if the line number
4544   has not changed.  */
4545
4546void
4547force_next_line_note (void)
4548{
4549#ifdef USE_MAPPED_LOCATION
4550  last_location = -1;
4551#else
4552  last_location.line = -1;
4553#endif
4554}
4555
4556/* Place a note of KIND on insn INSN with DATUM as the datum. If a
4557   note of this type already exists, remove it first.  */
4558
4559rtx
4560set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4561{
4562  rtx note = find_reg_note (insn, kind, NULL_RTX);
4563
4564  switch (kind)
4565    {
4566    case REG_EQUAL:
4567    case REG_EQUIV:
4568      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4569	 has multiple sets (some callers assume single_set
4570	 means the insn only has one set, when in fact it
4571	 means the insn only has one * useful * set).  */
4572      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4573	{
4574	  gcc_assert (!note);
4575	  return NULL_RTX;
4576	}
4577
4578      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4579	 It serves no useful purpose and breaks eliminate_regs.  */
4580      if (GET_CODE (datum) == ASM_OPERANDS)
4581	return NULL_RTX;
4582      break;
4583
4584    default:
4585      break;
4586    }
4587
4588  if (note)
4589    {
4590      XEXP (note, 0) = datum;
4591      return note;
4592    }
4593
4594  REG_NOTES (insn) = gen_rtx_EXPR_LIST ((enum machine_mode) kind, datum,
4595					REG_NOTES (insn));
4596  return REG_NOTES (insn);
4597}
4598
4599/* Return an indication of which type of insn should have X as a body.
4600   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
4601
4602static enum rtx_code
4603classify_insn (rtx x)
4604{
4605  if (LABEL_P (x))
4606    return CODE_LABEL;
4607  if (GET_CODE (x) == CALL)
4608    return CALL_INSN;
4609  if (GET_CODE (x) == RETURN)
4610    return JUMP_INSN;
4611  if (GET_CODE (x) == SET)
4612    {
4613      if (SET_DEST (x) == pc_rtx)
4614	return JUMP_INSN;
4615      else if (GET_CODE (SET_SRC (x)) == CALL)
4616	return CALL_INSN;
4617      else
4618	return INSN;
4619    }
4620  if (GET_CODE (x) == PARALLEL)
4621    {
4622      int j;
4623      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4624	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4625	  return CALL_INSN;
4626	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4627		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4628	  return JUMP_INSN;
4629	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4630		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4631	  return CALL_INSN;
4632    }
4633  return INSN;
4634}
4635
4636/* Emit the rtl pattern X as an appropriate kind of insn.
4637   If X is a label, it is simply added into the insn chain.  */
4638
4639rtx
4640emit (rtx x)
4641{
4642  enum rtx_code code = classify_insn (x);
4643
4644  switch (code)
4645    {
4646    case CODE_LABEL:
4647      return emit_label (x);
4648    case INSN:
4649      return emit_insn (x);
4650    case  JUMP_INSN:
4651      {
4652	rtx insn = emit_jump_insn (x);
4653	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4654	  return emit_barrier ();
4655	return insn;
4656      }
4657    case CALL_INSN:
4658      return emit_call_insn (x);
4659    default:
4660      gcc_unreachable ();
4661    }
4662}
4663
4664/* Space for free sequence stack entries.  */
4665static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4666
4667/* Begin emitting insns to a sequence.  If this sequence will contain
4668   something that might cause the compiler to pop arguments to function
4669   calls (because those pops have previously been deferred; see
4670   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4671   before calling this function.  That will ensure that the deferred
4672   pops are not accidentally emitted in the middle of this sequence.  */
4673
4674void
4675start_sequence (void)
4676{
4677  struct sequence_stack *tem;
4678
4679  if (free_sequence_stack != NULL)
4680    {
4681      tem = free_sequence_stack;
4682      free_sequence_stack = tem->next;
4683    }
4684  else
4685    tem = ggc_alloc (sizeof (struct sequence_stack));
4686
4687  tem->next = seq_stack;
4688  tem->first = first_insn;
4689  tem->last = last_insn;
4690
4691  seq_stack = tem;
4692
4693  first_insn = 0;
4694  last_insn = 0;
4695}
4696
4697/* Set up the insn chain starting with FIRST as the current sequence,
4698   saving the previously current one.  See the documentation for
4699   start_sequence for more information about how to use this function.  */
4700
4701void
4702push_to_sequence (rtx first)
4703{
4704  rtx last;
4705
4706  start_sequence ();
4707
4708  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4709
4710  first_insn = first;
4711  last_insn = last;
4712}
4713
4714/* Set up the outer-level insn chain
4715   as the current sequence, saving the previously current one.  */
4716
4717void
4718push_topmost_sequence (void)
4719{
4720  struct sequence_stack *stack, *top = NULL;
4721
4722  start_sequence ();
4723
4724  for (stack = seq_stack; stack; stack = stack->next)
4725    top = stack;
4726
4727  first_insn = top->first;
4728  last_insn = top->last;
4729}
4730
4731/* After emitting to the outer-level insn chain, update the outer-level
4732   insn chain, and restore the previous saved state.  */
4733
4734void
4735pop_topmost_sequence (void)
4736{
4737  struct sequence_stack *stack, *top = NULL;
4738
4739  for (stack = seq_stack; stack; stack = stack->next)
4740    top = stack;
4741
4742  top->first = first_insn;
4743  top->last = last_insn;
4744
4745  end_sequence ();
4746}
4747
4748/* After emitting to a sequence, restore previous saved state.
4749
4750   To get the contents of the sequence just made, you must call
4751   `get_insns' *before* calling here.
4752
4753   If the compiler might have deferred popping arguments while
4754   generating this sequence, and this sequence will not be immediately
4755   inserted into the instruction stream, use do_pending_stack_adjust
4756   before calling get_insns.  That will ensure that the deferred
4757   pops are inserted into this sequence, and not into some random
4758   location in the instruction stream.  See INHIBIT_DEFER_POP for more
4759   information about deferred popping of arguments.  */
4760
4761void
4762end_sequence (void)
4763{
4764  struct sequence_stack *tem = seq_stack;
4765
4766  first_insn = tem->first;
4767  last_insn = tem->last;
4768  seq_stack = tem->next;
4769
4770  memset (tem, 0, sizeof (*tem));
4771  tem->next = free_sequence_stack;
4772  free_sequence_stack = tem;
4773}
4774
4775/* Return 1 if currently emitting into a sequence.  */
4776
4777int
4778in_sequence_p (void)
4779{
4780  return seq_stack != 0;
4781}
4782
4783/* Put the various virtual registers into REGNO_REG_RTX.  */
4784
4785static void
4786init_virtual_regs (struct emit_status *es)
4787{
4788  rtx *ptr = es->x_regno_reg_rtx;
4789  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4790  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4791  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4792  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4793  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4794}
4795
4796
4797/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
4798static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4799static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4800static int copy_insn_n_scratches;
4801
4802/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4803   copied an ASM_OPERANDS.
4804   In that case, it is the original input-operand vector.  */
4805static rtvec orig_asm_operands_vector;
4806
4807/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4808   copied an ASM_OPERANDS.
4809   In that case, it is the copied input-operand vector.  */
4810static rtvec copy_asm_operands_vector;
4811
4812/* Likewise for the constraints vector.  */
4813static rtvec orig_asm_constraints_vector;
4814static rtvec copy_asm_constraints_vector;
4815
4816/* Recursively create a new copy of an rtx for copy_insn.
4817   This function differs from copy_rtx in that it handles SCRATCHes and
4818   ASM_OPERANDs properly.
4819   Normally, this function is not used directly; use copy_insn as front end.
4820   However, you could first copy an insn pattern with copy_insn and then use
4821   this function afterwards to properly copy any REG_NOTEs containing
4822   SCRATCHes.  */
4823
4824rtx
4825copy_insn_1 (rtx orig)
4826{
4827  rtx copy;
4828  int i, j;
4829  RTX_CODE code;
4830  const char *format_ptr;
4831
4832  code = GET_CODE (orig);
4833
4834  switch (code)
4835    {
4836    case REG:
4837    case CONST_INT:
4838    case CONST_DOUBLE:
4839    case CONST_VECTOR:
4840    case SYMBOL_REF:
4841    case CODE_LABEL:
4842    case PC:
4843    case CC0:
4844      return orig;
4845    case CLOBBER:
4846      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4847	return orig;
4848      break;
4849
4850    case SCRATCH:
4851      for (i = 0; i < copy_insn_n_scratches; i++)
4852	if (copy_insn_scratch_in[i] == orig)
4853	  return copy_insn_scratch_out[i];
4854      break;
4855
4856    case CONST:
4857      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
4858	 a LABEL_REF, it isn't sharable.  */
4859      if (GET_CODE (XEXP (orig, 0)) == PLUS
4860	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4861	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4862	return orig;
4863      break;
4864
4865      /* A MEM with a constant address is not sharable.  The problem is that
4866	 the constant address may need to be reloaded.  If the mem is shared,
4867	 then reloading one copy of this mem will cause all copies to appear
4868	 to have been reloaded.  */
4869
4870    default:
4871      break;
4872    }
4873
4874  /* Copy the various flags, fields, and other information.  We assume
4875     that all fields need copying, and then clear the fields that should
4876     not be copied.  That is the sensible default behavior, and forces
4877     us to explicitly document why we are *not* copying a flag.  */
4878  copy = shallow_copy_rtx (orig);
4879
4880  /* We do not copy the USED flag, which is used as a mark bit during
4881     walks over the RTL.  */
4882  RTX_FLAG (copy, used) = 0;
4883
4884  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
4885  if (INSN_P (orig))
4886    {
4887      RTX_FLAG (copy, jump) = 0;
4888      RTX_FLAG (copy, call) = 0;
4889      RTX_FLAG (copy, frame_related) = 0;
4890    }
4891
4892  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4893
4894  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4895    switch (*format_ptr++)
4896      {
4897      case 'e':
4898	if (XEXP (orig, i) != NULL)
4899	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4900	break;
4901
4902      case 'E':
4903      case 'V':
4904	if (XVEC (orig, i) == orig_asm_constraints_vector)
4905	  XVEC (copy, i) = copy_asm_constraints_vector;
4906	else if (XVEC (orig, i) == orig_asm_operands_vector)
4907	  XVEC (copy, i) = copy_asm_operands_vector;
4908	else if (XVEC (orig, i) != NULL)
4909	  {
4910	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4911	    for (j = 0; j < XVECLEN (copy, i); j++)
4912	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4913	  }
4914	break;
4915
4916      case 't':
4917      case 'w':
4918      case 'i':
4919      case 's':
4920      case 'S':
4921      case 'u':
4922      case '0':
4923	/* These are left unchanged.  */
4924	break;
4925
4926      default:
4927	gcc_unreachable ();
4928      }
4929
4930  if (code == SCRATCH)
4931    {
4932      i = copy_insn_n_scratches++;
4933      gcc_assert (i < MAX_RECOG_OPERANDS);
4934      copy_insn_scratch_in[i] = orig;
4935      copy_insn_scratch_out[i] = copy;
4936    }
4937  else if (code == ASM_OPERANDS)
4938    {
4939      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4940      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4941      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4942      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4943    }
4944
4945  return copy;
4946}
4947
4948/* Create a new copy of an rtx.
4949   This function differs from copy_rtx in that it handles SCRATCHes and
4950   ASM_OPERANDs properly.
4951   INSN doesn't really have to be a full INSN; it could be just the
4952   pattern.  */
4953rtx
4954copy_insn (rtx insn)
4955{
4956  copy_insn_n_scratches = 0;
4957  orig_asm_operands_vector = 0;
4958  orig_asm_constraints_vector = 0;
4959  copy_asm_operands_vector = 0;
4960  copy_asm_constraints_vector = 0;
4961  return copy_insn_1 (insn);
4962}
4963
4964/* Initialize data structures and variables in this file
4965   before generating rtl for each function.  */
4966
4967void
4968init_emit (void)
4969{
4970  struct function *f = cfun;
4971
4972  f->emit = ggc_alloc (sizeof (struct emit_status));
4973  first_insn = NULL;
4974  last_insn = NULL;
4975  cur_insn_uid = 1;
4976  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4977  last_location = UNKNOWN_LOCATION;
4978  first_label_num = label_num;
4979  seq_stack = NULL;
4980
4981  /* Init the tables that describe all the pseudo regs.  */
4982
4983  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4984
4985  f->emit->regno_pointer_align
4986    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
4987			 * sizeof (unsigned char));
4988
4989  regno_reg_rtx
4990    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
4991
4992  /* Put copies of all the hard registers into regno_reg_rtx.  */
4993  memcpy (regno_reg_rtx,
4994	  static_regno_reg_rtx,
4995	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
4996
4997  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
4998  init_virtual_regs (f->emit);
4999
5000  /* Indicate that the virtual registers and stack locations are
5001     all pointers.  */
5002  REG_POINTER (stack_pointer_rtx) = 1;
5003  REG_POINTER (frame_pointer_rtx) = 1;
5004  REG_POINTER (hard_frame_pointer_rtx) = 1;
5005  REG_POINTER (arg_pointer_rtx) = 1;
5006
5007  REG_POINTER (virtual_incoming_args_rtx) = 1;
5008  REG_POINTER (virtual_stack_vars_rtx) = 1;
5009  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5010  REG_POINTER (virtual_outgoing_args_rtx) = 1;
5011  REG_POINTER (virtual_cfa_rtx) = 1;
5012
5013#ifdef STACK_BOUNDARY
5014  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5015  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5016  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5017  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5018
5019  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5020  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5021  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5022  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5023  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5024#endif
5025
5026#ifdef INIT_EXPANDERS
5027  INIT_EXPANDERS;
5028#endif
5029}
5030
5031/* Generate a vector constant for mode MODE and constant value CONSTANT.  */
5032
5033static rtx
5034gen_const_vector (enum machine_mode mode, int constant)
5035{
5036  rtx tem;
5037  rtvec v;
5038  int units, i;
5039  enum machine_mode inner;
5040
5041  units = GET_MODE_NUNITS (mode);
5042  inner = GET_MODE_INNER (mode);
5043
5044  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5045
5046  v = rtvec_alloc (units);
5047
5048  /* We need to call this function after we set the scalar const_tiny_rtx
5049     entries.  */
5050  gcc_assert (const_tiny_rtx[constant][(int) inner]);
5051
5052  for (i = 0; i < units; ++i)
5053    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5054
5055  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5056  return tem;
5057}
5058
5059/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5060   all elements are zero, and the one vector when all elements are one.  */
5061rtx
5062gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5063{
5064  enum machine_mode inner = GET_MODE_INNER (mode);
5065  int nunits = GET_MODE_NUNITS (mode);
5066  rtx x;
5067  int i;
5068
5069  /* Check to see if all of the elements have the same value.  */
5070  x = RTVEC_ELT (v, nunits - 1);
5071  for (i = nunits - 2; i >= 0; i--)
5072    if (RTVEC_ELT (v, i) != x)
5073      break;
5074
5075  /* If the values are all the same, check to see if we can use one of the
5076     standard constant vectors.  */
5077  if (i == -1)
5078    {
5079      if (x == CONST0_RTX (inner))
5080	return CONST0_RTX (mode);
5081      else if (x == CONST1_RTX (inner))
5082	return CONST1_RTX (mode);
5083    }
5084
5085  return gen_rtx_raw_CONST_VECTOR (mode, v);
5086}
5087
5088/* Create some permanent unique rtl objects shared between all functions.
5089   LINE_NUMBERS is nonzero if line numbers are to be generated.  */
5090
5091void
5092init_emit_once (int line_numbers)
5093{
5094  int i;
5095  enum machine_mode mode;
5096  enum machine_mode double_mode;
5097
5098  /* We need reg_raw_mode, so initialize the modes now.  */
5099  init_reg_modes_once ();
5100
5101  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5102     tables.  */
5103  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5104				    const_int_htab_eq, NULL);
5105
5106  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5107				       const_double_htab_eq, NULL);
5108
5109  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5110				    mem_attrs_htab_eq, NULL);
5111  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5112				    reg_attrs_htab_eq, NULL);
5113
5114  no_line_numbers = ! line_numbers;
5115
5116  /* Compute the word and byte modes.  */
5117
5118  byte_mode = VOIDmode;
5119  word_mode = VOIDmode;
5120  double_mode = VOIDmode;
5121
5122  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5123       mode != VOIDmode;
5124       mode = GET_MODE_WIDER_MODE (mode))
5125    {
5126      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5127	  && byte_mode == VOIDmode)
5128	byte_mode = mode;
5129
5130      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5131	  && word_mode == VOIDmode)
5132	word_mode = mode;
5133    }
5134
5135  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5136       mode != VOIDmode;
5137       mode = GET_MODE_WIDER_MODE (mode))
5138    {
5139      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5140	  && double_mode == VOIDmode)
5141	double_mode = mode;
5142    }
5143
5144  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5145
5146  /* Assign register numbers to the globally defined register rtx.
5147     This must be done at runtime because the register number field
5148     is in a union and some compilers can't initialize unions.  */
5149
5150  pc_rtx = gen_rtx_PC (VOIDmode);
5151  cc0_rtx = gen_rtx_CC0 (VOIDmode);
5152  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5153  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5154  if (hard_frame_pointer_rtx == 0)
5155    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5156					  HARD_FRAME_POINTER_REGNUM);
5157  if (arg_pointer_rtx == 0)
5158    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5159  virtual_incoming_args_rtx =
5160    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5161  virtual_stack_vars_rtx =
5162    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5163  virtual_stack_dynamic_rtx =
5164    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5165  virtual_outgoing_args_rtx =
5166    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5167  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5168
5169  /* Initialize RTL for commonly used hard registers.  These are
5170     copied into regno_reg_rtx as we begin to compile each function.  */
5171  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5172    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5173
5174#ifdef INIT_EXPANDERS
5175  /* This is to initialize {init|mark|free}_machine_status before the first
5176     call to push_function_context_to.  This is needed by the Chill front
5177     end which calls push_function_context_to before the first call to
5178     init_function_start.  */
5179  INIT_EXPANDERS;
5180#endif
5181
5182  /* Create the unique rtx's for certain rtx codes and operand values.  */
5183
5184  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5185     tries to use these variables.  */
5186  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5187    const_int_rtx[i + MAX_SAVED_CONST_INT] =
5188      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5189
5190  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5191      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5192    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5193  else
5194    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5195
5196  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
5197  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
5198  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
5199  REAL_VALUE_FROM_INT (dconst3,   3,  0, double_mode);
5200  REAL_VALUE_FROM_INT (dconst10, 10,  0, double_mode);
5201  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5202  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5203
5204  dconsthalf = dconst1;
5205  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5206
5207  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5208
5209  /* Initialize mathematical constants for constant folding builtins.
5210     These constants need to be given to at least 160 bits precision.  */
5211  real_from_string (&dconstpi,
5212    "3.1415926535897932384626433832795028841971693993751058209749445923078");
5213  real_from_string (&dconste,
5214    "2.7182818284590452353602874713526624977572470936999595749669676277241");
5215
5216  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5217    {
5218      REAL_VALUE_TYPE *r =
5219	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5220
5221      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5222	   mode != VOIDmode;
5223	   mode = GET_MODE_WIDER_MODE (mode))
5224	const_tiny_rtx[i][(int) mode] =
5225	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5226
5227      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5228	   mode != VOIDmode;
5229	   mode = GET_MODE_WIDER_MODE (mode))
5230	const_tiny_rtx[i][(int) mode] =
5231	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5232
5233      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5234
5235      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5236	   mode != VOIDmode;
5237	   mode = GET_MODE_WIDER_MODE (mode))
5238	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5239
5240      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5241	   mode != VOIDmode;
5242	   mode = GET_MODE_WIDER_MODE (mode))
5243	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5244    }
5245
5246  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5247       mode != VOIDmode;
5248       mode = GET_MODE_WIDER_MODE (mode))
5249    {
5250      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5251      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5252    }
5253
5254  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5255       mode != VOIDmode;
5256       mode = GET_MODE_WIDER_MODE (mode))
5257    {
5258      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5259      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5260    }
5261
5262  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5263    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5264      const_tiny_rtx[0][i] = const0_rtx;
5265
5266  const_tiny_rtx[0][(int) BImode] = const0_rtx;
5267  if (STORE_FLAG_VALUE == 1)
5268    const_tiny_rtx[1][(int) BImode] = const1_rtx;
5269
5270#ifdef RETURN_ADDRESS_POINTER_REGNUM
5271  return_address_pointer_rtx
5272    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5273#endif
5274
5275#ifdef STATIC_CHAIN_REGNUM
5276  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5277
5278#ifdef STATIC_CHAIN_INCOMING_REGNUM
5279  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5280    static_chain_incoming_rtx
5281      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5282  else
5283#endif
5284    static_chain_incoming_rtx = static_chain_rtx;
5285#endif
5286
5287#ifdef STATIC_CHAIN
5288  static_chain_rtx = STATIC_CHAIN;
5289
5290#ifdef STATIC_CHAIN_INCOMING
5291  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5292#else
5293  static_chain_incoming_rtx = static_chain_rtx;
5294#endif
5295#endif
5296
5297  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5298    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5299}
5300
5301/* Produce exact duplicate of insn INSN after AFTER.
5302   Care updating of libcall regions if present.  */
5303
5304rtx
5305emit_copy_of_insn_after (rtx insn, rtx after)
5306{
5307  rtx new;
5308  rtx note1, note2, link;
5309
5310  switch (GET_CODE (insn))
5311    {
5312    case INSN:
5313      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5314      break;
5315
5316    case JUMP_INSN:
5317      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5318      break;
5319
5320    case CALL_INSN:
5321      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5322      if (CALL_INSN_FUNCTION_USAGE (insn))
5323	CALL_INSN_FUNCTION_USAGE (new)
5324	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5325      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5326      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5327      break;
5328
5329    default:
5330      gcc_unreachable ();
5331    }
5332
5333  /* Update LABEL_NUSES.  */
5334  mark_jump_label (PATTERN (new), new, 0);
5335
5336  INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5337
5338  /* If the old insn is frame related, then so is the new one.  This is
5339     primarily needed for IA-64 unwind info which marks epilogue insns,
5340     which may be duplicated by the basic block reordering code.  */
5341  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5342
5343  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5344     make them.  */
5345  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5346    if (REG_NOTE_KIND (link) != REG_LABEL)
5347      {
5348	if (GET_CODE (link) == EXPR_LIST)
5349	  REG_NOTES (new)
5350	    = copy_insn_1 (gen_rtx_EXPR_LIST (GET_MODE (link),
5351					      XEXP (link, 0),
5352					      REG_NOTES (new)));
5353	else
5354	  REG_NOTES (new)
5355	    = copy_insn_1 (gen_rtx_INSN_LIST (GET_MODE (link),
5356					      XEXP (link, 0),
5357					      REG_NOTES (new)));
5358      }
5359
5360  /* Fix the libcall sequences.  */
5361  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5362    {
5363      rtx p = new;
5364      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5365	p = PREV_INSN (p);
5366      XEXP (note1, 0) = p;
5367      XEXP (note2, 0) = new;
5368    }
5369  INSN_CODE (new) = INSN_CODE (insn);
5370  return new;
5371}
5372
5373static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5374rtx
5375gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5376{
5377  if (hard_reg_clobbers[mode][regno])
5378    return hard_reg_clobbers[mode][regno];
5379  else
5380    return (hard_reg_clobbers[mode][regno] =
5381	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5382}
5383
5384#include "gt-emit-rtl.h"
5385