1/* Subroutines used by or related to instruction recognition.
2   Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "hash-set.h"
26#include "machmode.h"
27#include "vec.h"
28#include "double-int.h"
29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "wide-int.h"
33#include "inchash.h"
34#include "tree.h"
35#include "rtl-error.h"
36#include "tm_p.h"
37#include "insn-config.h"
38#include "insn-attr.h"
39#include "hard-reg-set.h"
40#include "recog.h"
41#include "regs.h"
42#include "addresses.h"
43#include "hashtab.h"
44#include "function.h"
45#include "rtl.h"
46#include "flags.h"
47#include "statistics.h"
48#include "real.h"
49#include "fixed-value.h"
50#include "expmed.h"
51#include "dojump.h"
52#include "explow.h"
53#include "calls.h"
54#include "emit-rtl.h"
55#include "varasm.h"
56#include "stmt.h"
57#include "expr.h"
58#include "predict.h"
59#include "dominance.h"
60#include "cfg.h"
61#include "cfgrtl.h"
62#include "cfgbuild.h"
63#include "cfgcleanup.h"
64#include "basic-block.h"
65#include "reload.h"
66#include "target.h"
67#include "tree-pass.h"
68#include "df.h"
69#include "insn-codes.h"
70
71#ifndef STACK_PUSH_CODE
72#ifdef STACK_GROWS_DOWNWARD
73#define STACK_PUSH_CODE PRE_DEC
74#else
75#define STACK_PUSH_CODE PRE_INC
76#endif
77#endif
78
79#ifndef STACK_POP_CODE
80#ifdef STACK_GROWS_DOWNWARD
81#define STACK_POP_CODE POST_INC
82#else
83#define STACK_POP_CODE POST_DEC
84#endif
85#endif
86
87static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
88static void validate_replace_src_1 (rtx *, void *);
89static rtx split_insn (rtx_insn *);
90
91struct target_recog default_target_recog;
92#if SWITCHABLE_TARGET
93struct target_recog *this_target_recog = &default_target_recog;
94#endif
95
96/* Nonzero means allow operands to be volatile.
97   This should be 0 if you are generating rtl, such as if you are calling
98   the functions in optabs.c and expmed.c (most of the time).
99   This should be 1 if all valid insns need to be recognized,
100   such as in reginfo.c and final.c and reload.c.
101
102   init_recog and init_recog_no_volatile are responsible for setting this.  */
103
104int volatile_ok;
105
106struct recog_data_d recog_data;
107
108/* Contains a vector of operand_alternative structures, such that
109   operand OP of alternative A is at index A * n_operands + OP.
110   Set up by preprocess_constraints.  */
111const operand_alternative *recog_op_alt;
112
113/* Used to provide recog_op_alt for asms.  */
114static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
115				      * MAX_RECOG_ALTERNATIVES];
116
117/* On return from `constrain_operands', indicate which alternative
118   was satisfied.  */
119
120int which_alternative;
121
122/* Nonzero after end of reload pass.
123   Set to 1 or 0 by toplev.c.
124   Controls the significance of (SUBREG (MEM)).  */
125
126int reload_completed;
127
128/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
129int epilogue_completed;
130
131/* Initialize data used by the function `recog'.
132   This must be called once in the compilation of a function
133   before any insn recognition may be done in the function.  */
134
135void
136init_recog_no_volatile (void)
137{
138  volatile_ok = 0;
139}
140
141void
142init_recog (void)
143{
144  volatile_ok = 1;
145}
146
147
148/* Return true if labels in asm operands BODY are LABEL_REFs.  */
149
150static bool
151asm_labels_ok (rtx body)
152{
153  rtx asmop;
154  int i;
155
156  asmop = extract_asm_operands (body);
157  if (asmop == NULL_RTX)
158    return true;
159
160  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
161    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
162      return false;
163
164  return true;
165}
166
167/* Check that X is an insn-body for an `asm' with operands
168   and that the operands mentioned in it are legitimate.  */
169
170int
171check_asm_operands (rtx x)
172{
173  int noperands;
174  rtx *operands;
175  const char **constraints;
176  int i;
177
178  if (!asm_labels_ok (x))
179    return 0;
180
181  /* Post-reload, be more strict with things.  */
182  if (reload_completed)
183    {
184      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
185      rtx_insn *insn = make_insn_raw (x);
186      extract_insn (insn);
187      constrain_operands (1, get_enabled_alternatives (insn));
188      return which_alternative >= 0;
189    }
190
191  noperands = asm_noperands (x);
192  if (noperands < 0)
193    return 0;
194  if (noperands == 0)
195    return 1;
196
197  operands = XALLOCAVEC (rtx, noperands);
198  constraints = XALLOCAVEC (const char *, noperands);
199
200  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
201
202  for (i = 0; i < noperands; i++)
203    {
204      const char *c = constraints[i];
205      if (c[0] == '%')
206	c++;
207      if (! asm_operand_ok (operands[i], c, constraints))
208	return 0;
209    }
210
211  return 1;
212}
213
214/* Static data for the next two routines.  */
215
216typedef struct change_t
217{
218  rtx object;
219  int old_code;
220  rtx *loc;
221  rtx old;
222  bool unshare;
223} change_t;
224
225static change_t *changes;
226static int changes_allocated;
227
228static int num_changes = 0;
229
230/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
231   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
232   the change is simply made.
233
234   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
235   will be called with the address and mode as parameters.  If OBJECT is
236   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
237   the change in place.
238
239   IN_GROUP is nonzero if this is part of a group of changes that must be
240   performed as a group.  In that case, the changes will be stored.  The
241   function `apply_change_group' will validate and apply the changes.
242
243   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
244   or validate the memory reference with the change applied.  If the result
245   is not valid for the machine, suppress the change and return zero.
246   Otherwise, perform the change and return 1.  */
247
248static bool
249validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
250{
251  rtx old = *loc;
252
253  if (old == new_rtx || rtx_equal_p (old, new_rtx))
254    return 1;
255
256  gcc_assert (in_group != 0 || num_changes == 0);
257
258  *loc = new_rtx;
259
260  /* Save the information describing this change.  */
261  if (num_changes >= changes_allocated)
262    {
263      if (changes_allocated == 0)
264	/* This value allows for repeated substitutions inside complex
265	   indexed addresses, or changes in up to 5 insns.  */
266	changes_allocated = MAX_RECOG_OPERANDS * 5;
267      else
268	changes_allocated *= 2;
269
270      changes = XRESIZEVEC (change_t, changes, changes_allocated);
271    }
272
273  changes[num_changes].object = object;
274  changes[num_changes].loc = loc;
275  changes[num_changes].old = old;
276  changes[num_changes].unshare = unshare;
277
278  if (object && !MEM_P (object))
279    {
280      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
281	 case invalid.  */
282      changes[num_changes].old_code = INSN_CODE (object);
283      INSN_CODE (object) = -1;
284    }
285
286  num_changes++;
287
288  /* If we are making a group of changes, return 1.  Otherwise, validate the
289     change group we made.  */
290
291  if (in_group)
292    return 1;
293  else
294    return apply_change_group ();
295}
296
297/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
298   UNSHARE to false.  */
299
300bool
301validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
302{
303  return validate_change_1 (object, loc, new_rtx, in_group, false);
304}
305
306/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
307   UNSHARE to true.  */
308
309bool
310validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
311{
312  return validate_change_1 (object, loc, new_rtx, in_group, true);
313}
314
315
316/* Keep X canonicalized if some changes have made it non-canonical; only
317   modifies the operands of X, not (for example) its code.  Simplifications
318   are not the job of this routine.
319
320   Return true if anything was changed.  */
321bool
322canonicalize_change_group (rtx insn, rtx x)
323{
324  if (COMMUTATIVE_P (x)
325      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
326    {
327      /* Oops, the caller has made X no longer canonical.
328	 Let's redo the changes in the correct order.  */
329      rtx tem = XEXP (x, 0);
330      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
331      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
332      return true;
333    }
334  else
335    return false;
336}
337
338
339/* This subroutine of apply_change_group verifies whether the changes to INSN
340   were valid; i.e. whether INSN can still be recognized.
341
342   If IN_GROUP is true clobbers which have to be added in order to
343   match the instructions will be added to the current change group.
344   Otherwise the changes will take effect immediately.  */
345
346int
347insn_invalid_p (rtx_insn *insn, bool in_group)
348{
349  rtx pat = PATTERN (insn);
350  int num_clobbers = 0;
351  /* If we are before reload and the pattern is a SET, see if we can add
352     clobbers.  */
353  int icode = recog (pat, insn,
354		     (GET_CODE (pat) == SET
355		      && ! reload_completed
356                      && ! reload_in_progress)
357		     ? &num_clobbers : 0);
358  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
359
360
361  /* If this is an asm and the operand aren't legal, then fail.  Likewise if
362     this is not an asm and the insn wasn't recognized.  */
363  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
364      || (!is_asm && icode < 0))
365    return 1;
366
367  /* If we have to add CLOBBERs, fail if we have to add ones that reference
368     hard registers since our callers can't know if they are live or not.
369     Otherwise, add them.  */
370  if (num_clobbers > 0)
371    {
372      rtx newpat;
373
374      if (added_clobbers_hard_reg_p (icode))
375	return 1;
376
377      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
378      XVECEXP (newpat, 0, 0) = pat;
379      add_clobbers (newpat, icode);
380      if (in_group)
381	validate_change (insn, &PATTERN (insn), newpat, 1);
382      else
383	PATTERN (insn) = pat = newpat;
384    }
385
386  /* After reload, verify that all constraints are satisfied.  */
387  if (reload_completed)
388    {
389      extract_insn (insn);
390
391      if (! constrain_operands (1, get_preferred_alternatives (insn)))
392	return 1;
393    }
394
395  INSN_CODE (insn) = icode;
396  return 0;
397}
398
399/* Return number of changes made and not validated yet.  */
400int
401num_changes_pending (void)
402{
403  return num_changes;
404}
405
406/* Tentatively apply the changes numbered NUM and up.
407   Return 1 if all changes are valid, zero otherwise.  */
408
409int
410verify_changes (int num)
411{
412  int i;
413  rtx last_validated = NULL_RTX;
414
415  /* The changes have been applied and all INSN_CODEs have been reset to force
416     rerecognition.
417
418     The changes are valid if we aren't given an object, or if we are
419     given a MEM and it still is a valid address, or if this is in insn
420     and it is recognized.  In the latter case, if reload has completed,
421     we also require that the operands meet the constraints for
422     the insn.  */
423
424  for (i = num; i < num_changes; i++)
425    {
426      rtx object = changes[i].object;
427
428      /* If there is no object to test or if it is the same as the one we
429         already tested, ignore it.  */
430      if (object == 0 || object == last_validated)
431	continue;
432
433      if (MEM_P (object))
434	{
435	  if (! memory_address_addr_space_p (GET_MODE (object),
436					     XEXP (object, 0),
437					     MEM_ADDR_SPACE (object)))
438	    break;
439	}
440      else if (/* changes[i].old might be zero, e.g. when putting a
441	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
442	       changes[i].old
443	       && REG_P (changes[i].old)
444	       && asm_noperands (PATTERN (object)) > 0
445	       && REG_EXPR (changes[i].old) != NULL_TREE
446	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
447	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
448	{
449	  /* Don't allow changes of hard register operands to inline
450	     assemblies if they have been defined as register asm ("x").  */
451	  break;
452	}
453      else if (DEBUG_INSN_P (object))
454	continue;
455      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
456	{
457	  rtx pat = PATTERN (object);
458
459	  /* Perhaps we couldn't recognize the insn because there were
460	     extra CLOBBERs at the end.  If so, try to re-recognize
461	     without the last CLOBBER (later iterations will cause each of
462	     them to be eliminated, in turn).  But don't do this if we
463	     have an ASM_OPERAND.  */
464	  if (GET_CODE (pat) == PARALLEL
465	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
466	      && asm_noperands (PATTERN (object)) < 0)
467	    {
468	      rtx newpat;
469
470	      if (XVECLEN (pat, 0) == 2)
471		newpat = XVECEXP (pat, 0, 0);
472	      else
473		{
474		  int j;
475
476		  newpat
477		    = gen_rtx_PARALLEL (VOIDmode,
478					rtvec_alloc (XVECLEN (pat, 0) - 1));
479		  for (j = 0; j < XVECLEN (newpat, 0); j++)
480		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
481		}
482
483	      /* Add a new change to this group to replace the pattern
484		 with this new pattern.  Then consider this change
485		 as having succeeded.  The change we added will
486		 cause the entire call to fail if things remain invalid.
487
488		 Note that this can lose if a later change than the one
489		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
490		 but this shouldn't occur.  */
491
492	      validate_change (object, &PATTERN (object), newpat, 1);
493	      continue;
494	    }
495	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
496		   || GET_CODE (pat) == VAR_LOCATION)
497	    /* If this insn is a CLOBBER or USE, it is always valid, but is
498	       never recognized.  */
499	    continue;
500	  else
501	    break;
502	}
503      last_validated = object;
504    }
505
506  return (i == num_changes);
507}
508
509/* A group of changes has previously been issued with validate_change
510   and verified with verify_changes.  Call df_insn_rescan for each of
511   the insn changed and clear num_changes.  */
512
513void
514confirm_change_group (void)
515{
516  int i;
517  rtx last_object = NULL;
518
519  for (i = 0; i < num_changes; i++)
520    {
521      rtx object = changes[i].object;
522
523      if (changes[i].unshare)
524	*changes[i].loc = copy_rtx (*changes[i].loc);
525
526      /* Avoid unnecessary rescanning when multiple changes to same instruction
527         are made.  */
528      if (object)
529	{
530	  if (object != last_object && last_object && INSN_P (last_object))
531	    df_insn_rescan (as_a <rtx_insn *> (last_object));
532	  last_object = object;
533	}
534    }
535
536  if (last_object && INSN_P (last_object))
537    df_insn_rescan (as_a <rtx_insn *> (last_object));
538  num_changes = 0;
539}
540
541/* Apply a group of changes previously issued with `validate_change'.
542   If all changes are valid, call confirm_change_group and return 1,
543   otherwise, call cancel_changes and return 0.  */
544
545int
546apply_change_group (void)
547{
548  if (verify_changes (0))
549    {
550      confirm_change_group ();
551      return 1;
552    }
553  else
554    {
555      cancel_changes (0);
556      return 0;
557    }
558}
559
560
561/* Return the number of changes so far in the current group.  */
562
563int
564num_validated_changes (void)
565{
566  return num_changes;
567}
568
569/* Retract the changes numbered NUM and up.  */
570
571void
572cancel_changes (int num)
573{
574  int i;
575
576  /* Back out all the changes.  Do this in the opposite order in which
577     they were made.  */
578  for (i = num_changes - 1; i >= num; i--)
579    {
580      *changes[i].loc = changes[i].old;
581      if (changes[i].object && !MEM_P (changes[i].object))
582	INSN_CODE (changes[i].object) = changes[i].old_code;
583    }
584  num_changes = num;
585}
586
587/* Reduce conditional compilation elsewhere.  */
588#ifndef HAVE_extv
589#define HAVE_extv	0
590#define CODE_FOR_extv	CODE_FOR_nothing
591#endif
592#ifndef HAVE_extzv
593#define HAVE_extzv	0
594#define CODE_FOR_extzv	CODE_FOR_nothing
595#endif
596
597/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
598   rtx.  */
599
600static void
601simplify_while_replacing (rtx *loc, rtx to, rtx object,
602                          machine_mode op0_mode)
603{
604  rtx x = *loc;
605  enum rtx_code code = GET_CODE (x);
606  rtx new_rtx = NULL_RTX;
607
608  if (SWAPPABLE_OPERANDS_P (x)
609      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
610    {
611      validate_unshare_change (object, loc,
612			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
613					       : swap_condition (code),
614					       GET_MODE (x), XEXP (x, 1),
615					       XEXP (x, 0)), 1);
616      x = *loc;
617      code = GET_CODE (x);
618    }
619
620  /* Canonicalize arithmetics with all constant operands.  */
621  switch (GET_RTX_CLASS (code))
622    {
623    case RTX_UNARY:
624      if (CONSTANT_P (XEXP (x, 0)))
625	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
626					    op0_mode);
627      break;
628    case RTX_COMM_ARITH:
629    case RTX_BIN_ARITH:
630      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
631	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
632					     XEXP (x, 1));
633      break;
634    case RTX_COMPARE:
635    case RTX_COMM_COMPARE:
636      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
637	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
638						 XEXP (x, 0), XEXP (x, 1));
639      break;
640    default:
641      break;
642    }
643  if (new_rtx)
644    {
645      validate_change (object, loc, new_rtx, 1);
646      return;
647    }
648
649  switch (code)
650    {
651    case PLUS:
652      /* If we have a PLUS whose second operand is now a CONST_INT, use
653         simplify_gen_binary to try to simplify it.
654         ??? We may want later to remove this, once simplification is
655         separated from this function.  */
656      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
657	validate_change (object, loc,
658			 simplify_gen_binary
659			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
660      break;
661    case MINUS:
662      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
663	validate_change (object, loc,
664			 simplify_gen_binary
665			 (PLUS, GET_MODE (x), XEXP (x, 0),
666			  simplify_gen_unary (NEG,
667					      GET_MODE (x), XEXP (x, 1),
668					      GET_MODE (x))), 1);
669      break;
670    case ZERO_EXTEND:
671    case SIGN_EXTEND:
672      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
673	{
674	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
675				    op0_mode);
676	  /* If any of the above failed, substitute in something that
677	     we know won't be recognized.  */
678	  if (!new_rtx)
679	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
680	  validate_change (object, loc, new_rtx, 1);
681	}
682      break;
683    case SUBREG:
684      /* All subregs possible to simplify should be simplified.  */
685      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
686			     SUBREG_BYTE (x));
687
688      /* Subregs of VOIDmode operands are incorrect.  */
689      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
690	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
691      if (new_rtx)
692	validate_change (object, loc, new_rtx, 1);
693      break;
694    case ZERO_EXTRACT:
695    case SIGN_EXTRACT:
696      /* If we are replacing a register with memory, try to change the memory
697         to be the mode required for memory in extract operations (this isn't
698         likely to be an insertion operation; if it was, nothing bad will
699         happen, we might just fail in some cases).  */
700
701      if (MEM_P (XEXP (x, 0))
702	  && CONST_INT_P (XEXP (x, 1))
703	  && CONST_INT_P (XEXP (x, 2))
704	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
705					MEM_ADDR_SPACE (XEXP (x, 0)))
706	  && !MEM_VOLATILE_P (XEXP (x, 0)))
707	{
708	  machine_mode wanted_mode = VOIDmode;
709	  machine_mode is_mode = GET_MODE (XEXP (x, 0));
710	  int pos = INTVAL (XEXP (x, 2));
711
712	  if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
713	    {
714	      wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
715	      if (wanted_mode == VOIDmode)
716		wanted_mode = word_mode;
717	    }
718	  else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
719	    {
720	      wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
721	      if (wanted_mode == VOIDmode)
722		wanted_mode = word_mode;
723	    }
724
725	  /* If we have a narrower mode, we can do something.  */
726	  if (wanted_mode != VOIDmode
727	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
728	    {
729	      int offset = pos / BITS_PER_UNIT;
730	      rtx newmem;
731
732	      /* If the bytes and bits are counted differently, we
733	         must adjust the offset.  */
734	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
735		offset =
736		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
737		   offset);
738
739	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
740			  == GET_MODE_BITSIZE (wanted_mode));
741	      pos %= GET_MODE_BITSIZE (wanted_mode);
742
743	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
744
745	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
746	      validate_change (object, &XEXP (x, 0), newmem, 1);
747	    }
748	}
749
750      break;
751
752    default:
753      break;
754    }
755}
756
757/* Replace every occurrence of FROM in X with TO.  Mark each change with
758   validate_change passing OBJECT.  */
759
760static void
761validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
762                        bool simplify)
763{
764  int i, j;
765  const char *fmt;
766  rtx x = *loc;
767  enum rtx_code code;
768  machine_mode op0_mode = VOIDmode;
769  int prev_changes = num_changes;
770
771  if (!x)
772    return;
773
774  code = GET_CODE (x);
775  fmt = GET_RTX_FORMAT (code);
776  if (fmt[0] == 'e')
777    op0_mode = GET_MODE (XEXP (x, 0));
778
779  /* X matches FROM if it is the same rtx or they are both referring to the
780     same register in the same mode.  Avoid calling rtx_equal_p unless the
781     operands look similar.  */
782
783  if (x == from
784      || (REG_P (x) && REG_P (from)
785	  && GET_MODE (x) == GET_MODE (from)
786	  && REGNO (x) == REGNO (from))
787      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
788	  && rtx_equal_p (x, from)))
789    {
790      validate_unshare_change (object, loc, to, 1);
791      return;
792    }
793
794  /* Call ourself recursively to perform the replacements.
795     We must not replace inside already replaced expression, otherwise we
796     get infinite recursion for replacements like (reg X)->(subreg (reg X))
797     so we must special case shared ASM_OPERANDS.  */
798
799  if (GET_CODE (x) == PARALLEL)
800    {
801      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
802	{
803	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
804	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
805	    {
806	      /* Verify that operands are really shared.  */
807	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
808			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
809							      (x, 0, j))));
810	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
811				      from, to, object, simplify);
812	    }
813	  else
814	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
815                                    simplify);
816	}
817    }
818  else
819    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
820      {
821	if (fmt[i] == 'e')
822	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
823	else if (fmt[i] == 'E')
824	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
825	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
826                                    simplify);
827      }
828
829  /* If we didn't substitute, there is nothing more to do.  */
830  if (num_changes == prev_changes)
831    return;
832
833  /* ??? The regmove is no more, so is this aberration still necessary?  */
834  /* Allow substituted expression to have different mode.  This is used by
835     regmove to change mode of pseudo register.  */
836  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
837    op0_mode = GET_MODE (XEXP (x, 0));
838
839  /* Do changes needed to keep rtx consistent.  Don't do any other
840     simplifications, as it is not our job.  */
841  if (simplify)
842    simplify_while_replacing (loc, to, object, op0_mode);
843}
844
845/* Try replacing every occurrence of FROM in subexpression LOC of INSN
846   with TO.  After all changes have been made, validate by seeing
847   if INSN is still valid.  */
848
849int
850validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
851{
852  validate_replace_rtx_1 (loc, from, to, insn, true);
853  return apply_change_group ();
854}
855
856/* Try replacing every occurrence of FROM in INSN with TO.  After all
857   changes have been made, validate by seeing if INSN is still valid.  */
858
859int
860validate_replace_rtx (rtx from, rtx to, rtx insn)
861{
862  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
863  return apply_change_group ();
864}
865
866/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
867   is a part of INSN.  After all changes have been made, validate by seeing if
868   INSN is still valid.
869   validate_replace_rtx (from, to, insn) is equivalent to
870   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */
871
872int
873validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
874{
875  validate_replace_rtx_1 (where, from, to, insn, true);
876  return apply_change_group ();
877}
878
879/* Same as above, but do not simplify rtx afterwards.  */
880int
881validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
882                                      rtx insn)
883{
884  validate_replace_rtx_1 (where, from, to, insn, false);
885  return apply_change_group ();
886
887}
888
889/* Try replacing every occurrence of FROM in INSN with TO.  This also
890   will replace in REG_EQUAL and REG_EQUIV notes.  */
891
892void
893validate_replace_rtx_group (rtx from, rtx to, rtx insn)
894{
895  rtx note;
896  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
897  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
898    if (REG_NOTE_KIND (note) == REG_EQUAL
899	|| REG_NOTE_KIND (note) == REG_EQUIV)
900      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
901}
902
903/* Function called by note_uses to replace used subexpressions.  */
904struct validate_replace_src_data
905{
906  rtx from;			/* Old RTX */
907  rtx to;			/* New RTX */
908  rtx insn;			/* Insn in which substitution is occurring.  */
909};
910
911static void
912validate_replace_src_1 (rtx *x, void *data)
913{
914  struct validate_replace_src_data *d
915    = (struct validate_replace_src_data *) data;
916
917  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
918}
919
920/* Try replacing every occurrence of FROM in INSN with TO, avoiding
921   SET_DESTs.  */
922
923void
924validate_replace_src_group (rtx from, rtx to, rtx insn)
925{
926  struct validate_replace_src_data d;
927
928  d.from = from;
929  d.to = to;
930  d.insn = insn;
931  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
932}
933
934/* Try simplify INSN.
935   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
936   pattern and return true if something was simplified.  */
937
938bool
939validate_simplify_insn (rtx insn)
940{
941  int i;
942  rtx pat = NULL;
943  rtx newpat = NULL;
944
945  pat = PATTERN (insn);
946
947  if (GET_CODE (pat) == SET)
948    {
949      newpat = simplify_rtx (SET_SRC (pat));
950      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
951	validate_change (insn, &SET_SRC (pat), newpat, 1);
952      newpat = simplify_rtx (SET_DEST (pat));
953      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
954	validate_change (insn, &SET_DEST (pat), newpat, 1);
955    }
956  else if (GET_CODE (pat) == PARALLEL)
957    for (i = 0; i < XVECLEN (pat, 0); i++)
958      {
959	rtx s = XVECEXP (pat, 0, i);
960
961	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
962	  {
963	    newpat = simplify_rtx (SET_SRC (s));
964	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
965	      validate_change (insn, &SET_SRC (s), newpat, 1);
966	    newpat = simplify_rtx (SET_DEST (s));
967	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
968	      validate_change (insn, &SET_DEST (s), newpat, 1);
969	  }
970      }
971  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
972}
973
974#ifdef HAVE_cc0
975/* Return 1 if the insn using CC0 set by INSN does not contain
976   any ordered tests applied to the condition codes.
977   EQ and NE tests do not count.  */
978
979int
980next_insn_tests_no_inequality (rtx insn)
981{
982  rtx next = next_cc0_user (insn);
983
984  /* If there is no next insn, we have to take the conservative choice.  */
985  if (next == 0)
986    return 0;
987
988  return (INSN_P (next)
989	  && ! inequality_comparisons_p (PATTERN (next)));
990}
991#endif
992
993/* Return 1 if OP is a valid general operand for machine mode MODE.
994   This is either a register reference, a memory reference,
995   or a constant.  In the case of a memory reference, the address
996   is checked for general validity for the target machine.
997
998   Register and memory references must have mode MODE in order to be valid,
999   but some constants have no machine mode and are valid for any mode.
1000
1001   If MODE is VOIDmode, OP is checked for validity for whatever mode
1002   it has.
1003
1004   The main use of this function is as a predicate in match_operand
1005   expressions in the machine description.  */
1006
1007int
1008general_operand (rtx op, machine_mode mode)
1009{
1010  enum rtx_code code = GET_CODE (op);
1011
1012  if (mode == VOIDmode)
1013    mode = GET_MODE (op);
1014
1015  /* Don't accept CONST_INT or anything similar
1016     if the caller wants something floating.  */
1017  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1018      && GET_MODE_CLASS (mode) != MODE_INT
1019      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1020    return 0;
1021
1022  if (CONST_INT_P (op)
1023      && mode != VOIDmode
1024      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1025    return 0;
1026
1027  if (CONSTANT_P (op))
1028    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1029	     || mode == VOIDmode)
1030	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1031	    && targetm.legitimate_constant_p (mode == VOIDmode
1032					      ? GET_MODE (op)
1033					      : mode, op));
1034
1035  /* Except for certain constants with VOIDmode, already checked for,
1036     OP's mode must match MODE if MODE specifies a mode.  */
1037
1038  if (GET_MODE (op) != mode)
1039    return 0;
1040
1041  if (code == SUBREG)
1042    {
1043      rtx sub = SUBREG_REG (op);
1044
1045#ifdef INSN_SCHEDULING
1046      /* On machines that have insn scheduling, we want all memory
1047	 reference to be explicit, so outlaw paradoxical SUBREGs.
1048	 However, we must allow them after reload so that they can
1049	 get cleaned up by cleanup_subreg_operands.  */
1050      if (!reload_completed && MEM_P (sub)
1051	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
1052	return 0;
1053#endif
1054      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1055         may result in incorrect reference.  We should simplify all valid
1056         subregs of MEM anyway.  But allow this after reload because we
1057	 might be called from cleanup_subreg_operands.
1058
1059	 ??? This is a kludge.  */
1060      if (!reload_completed && SUBREG_BYTE (op) != 0
1061	  && MEM_P (sub))
1062	return 0;
1063
1064#ifdef CANNOT_CHANGE_MODE_CLASS
1065      if (REG_P (sub)
1066	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
1067	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1068	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1069	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1070	  /* LRA can generate some invalid SUBREGS just for matched
1071	     operand reload presentation.  LRA needs to treat them as
1072	     valid.  */
1073	  && ! LRA_SUBREG_P (op))
1074	return 0;
1075#endif
1076
1077      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
1078	 create such rtl, and we must reject it.  */
1079      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1080	  /* LRA can use subreg to store a floating point value in an
1081	     integer mode.  Although the floating point and the
1082	     integer modes need the same number of hard registers, the
1083	     size of floating point mode can be less than the integer
1084	     mode.  */
1085	  && ! lra_in_progress
1086	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1087	return 0;
1088
1089      op = sub;
1090      code = GET_CODE (op);
1091    }
1092
1093  if (code == REG)
1094    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1095	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1096
1097  if (code == MEM)
1098    {
1099      rtx y = XEXP (op, 0);
1100
1101      if (! volatile_ok && MEM_VOLATILE_P (op))
1102	return 0;
1103
1104      /* Use the mem's mode, since it will be reloaded thus.  LRA can
1105	 generate move insn with invalid addresses which is made valid
1106	 and efficiently calculated by LRA through further numerous
1107	 transformations.  */
1108      if (lra_in_progress
1109	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1110	return 1;
1111    }
1112
1113  return 0;
1114}
1115
1116/* Return 1 if OP is a valid memory address for a memory reference
1117   of mode MODE.
1118
1119   The main use of this function is as a predicate in match_operand
1120   expressions in the machine description.  */
1121
1122int
1123address_operand (rtx op, machine_mode mode)
1124{
1125  return memory_address_p (mode, op);
1126}
1127
1128/* Return 1 if OP is a register reference of mode MODE.
1129   If MODE is VOIDmode, accept a register in any mode.
1130
1131   The main use of this function is as a predicate in match_operand
1132   expressions in the machine description.  */
1133
1134int
1135register_operand (rtx op, machine_mode mode)
1136{
1137  if (GET_CODE (op) == SUBREG)
1138    {
1139      rtx sub = SUBREG_REG (op);
1140
1141      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1142	 because it is guaranteed to be reloaded into one.
1143	 Just make sure the MEM is valid in itself.
1144	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1145	 but currently it does result from (SUBREG (REG)...) where the
1146	 reg went on the stack.)  */
1147      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1148	return 0;
1149    }
1150  else if (!REG_P (op))
1151    return 0;
1152  return general_operand (op, mode);
1153}
1154
1155/* Return 1 for a register in Pmode; ignore the tested mode.  */
1156
1157int
1158pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1159{
1160  return register_operand (op, Pmode);
1161}
1162
1163/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1164   or a hard register.  */
1165
1166int
1167scratch_operand (rtx op, machine_mode mode)
1168{
1169  if (GET_MODE (op) != mode && mode != VOIDmode)
1170    return 0;
1171
1172  return (GET_CODE (op) == SCRATCH
1173	  || (REG_P (op)
1174	      && (lra_in_progress
1175		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
1176		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1177}
1178
1179/* Return 1 if OP is a valid immediate operand for mode MODE.
1180
1181   The main use of this function is as a predicate in match_operand
1182   expressions in the machine description.  */
1183
1184int
1185immediate_operand (rtx op, machine_mode mode)
1186{
1187  /* Don't accept CONST_INT or anything similar
1188     if the caller wants something floating.  */
1189  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1190      && GET_MODE_CLASS (mode) != MODE_INT
1191      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1192    return 0;
1193
1194  if (CONST_INT_P (op)
1195      && mode != VOIDmode
1196      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1197    return 0;
1198
1199  return (CONSTANT_P (op)
1200	  && (GET_MODE (op) == mode || mode == VOIDmode
1201	      || GET_MODE (op) == VOIDmode)
1202	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1203	  && targetm.legitimate_constant_p (mode == VOIDmode
1204					    ? GET_MODE (op)
1205					    : mode, op));
1206}
1207
1208/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */
1209
1210int
1211const_int_operand (rtx op, machine_mode mode)
1212{
1213  if (!CONST_INT_P (op))
1214    return 0;
1215
1216  if (mode != VOIDmode
1217      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1218    return 0;
1219
1220  return 1;
1221}
1222
1223#if TARGET_SUPPORTS_WIDE_INT
1224/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1225   of mode MODE.  */
1226int
1227const_scalar_int_operand (rtx op, machine_mode mode)
1228{
1229  if (!CONST_SCALAR_INT_P (op))
1230    return 0;
1231
1232  if (CONST_INT_P (op))
1233    return const_int_operand (op, mode);
1234
1235  if (mode != VOIDmode)
1236    {
1237      int prec = GET_MODE_PRECISION (mode);
1238      int bitsize = GET_MODE_BITSIZE (mode);
1239
1240      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1241	return 0;
1242
1243      if (prec == bitsize)
1244	return 1;
1245      else
1246	{
1247	  /* Multiword partial int.  */
1248	  HOST_WIDE_INT x
1249	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1250	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1251	}
1252    }
1253  return 1;
1254}
1255
1256/* Returns 1 if OP is an operand that is a constant integer or constant
1257   floating-point number of MODE.  */
1258
1259int
1260const_double_operand (rtx op, machine_mode mode)
1261{
1262  return (GET_CODE (op) == CONST_DOUBLE)
1263	  && (GET_MODE (op) == mode || mode == VOIDmode);
1264}
1265#else
1266/* Returns 1 if OP is an operand that is a constant integer or constant
1267   floating-point number of MODE.  */
1268
1269int
1270const_double_operand (rtx op, machine_mode mode)
1271{
1272  /* Don't accept CONST_INT or anything similar
1273     if the caller wants something floating.  */
1274  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1275      && GET_MODE_CLASS (mode) != MODE_INT
1276      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1277    return 0;
1278
1279  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1280	  && (mode == VOIDmode || GET_MODE (op) == mode
1281	      || GET_MODE (op) == VOIDmode));
1282}
1283#endif
1284/* Return 1 if OP is a general operand that is not an immediate
1285   operand of mode MODE.  */
1286
1287int
1288nonimmediate_operand (rtx op, machine_mode mode)
1289{
1290  return (general_operand (op, mode) && ! CONSTANT_P (op));
1291}
1292
1293/* Return 1 if OP is a register reference or immediate value of mode MODE.  */
1294
1295int
1296nonmemory_operand (rtx op, machine_mode mode)
1297{
1298  if (CONSTANT_P (op))
1299    return immediate_operand (op, mode);
1300  return register_operand (op, mode);
1301}
1302
1303/* Return 1 if OP is a valid operand that stands for pushing a
1304   value of mode MODE onto the stack.
1305
1306   The main use of this function is as a predicate in match_operand
1307   expressions in the machine description.  */
1308
1309int
1310push_operand (rtx op, machine_mode mode)
1311{
1312  unsigned int rounded_size = GET_MODE_SIZE (mode);
1313
1314#ifdef PUSH_ROUNDING
1315  rounded_size = PUSH_ROUNDING (rounded_size);
1316#endif
1317
1318  if (!MEM_P (op))
1319    return 0;
1320
1321  if (mode != VOIDmode && GET_MODE (op) != mode)
1322    return 0;
1323
1324  op = XEXP (op, 0);
1325
1326  if (rounded_size == GET_MODE_SIZE (mode))
1327    {
1328      if (GET_CODE (op) != STACK_PUSH_CODE)
1329	return 0;
1330    }
1331  else
1332    {
1333      if (GET_CODE (op) != PRE_MODIFY
1334	  || GET_CODE (XEXP (op, 1)) != PLUS
1335	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1336	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1337#ifdef STACK_GROWS_DOWNWARD
1338	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1339#else
1340	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1341#endif
1342	  )
1343	return 0;
1344    }
1345
1346  return XEXP (op, 0) == stack_pointer_rtx;
1347}
1348
1349/* Return 1 if OP is a valid operand that stands for popping a
1350   value of mode MODE off the stack.
1351
1352   The main use of this function is as a predicate in match_operand
1353   expressions in the machine description.  */
1354
1355int
1356pop_operand (rtx op, machine_mode mode)
1357{
1358  if (!MEM_P (op))
1359    return 0;
1360
1361  if (mode != VOIDmode && GET_MODE (op) != mode)
1362    return 0;
1363
1364  op = XEXP (op, 0);
1365
1366  if (GET_CODE (op) != STACK_POP_CODE)
1367    return 0;
1368
1369  return XEXP (op, 0) == stack_pointer_rtx;
1370}
1371
1372/* Return 1 if ADDR is a valid memory address
1373   for mode MODE in address space AS.  */
1374
1375int
1376memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1377			     rtx addr, addr_space_t as)
1378{
1379#ifdef GO_IF_LEGITIMATE_ADDRESS
1380  gcc_assert (ADDR_SPACE_GENERIC_P (as));
1381  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1382  return 0;
1383
1384 win:
1385  return 1;
1386#else
1387  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1388#endif
1389}
1390
1391/* Return 1 if OP is a valid memory reference with mode MODE,
1392   including a valid address.
1393
1394   The main use of this function is as a predicate in match_operand
1395   expressions in the machine description.  */
1396
1397int
1398memory_operand (rtx op, machine_mode mode)
1399{
1400  rtx inner;
1401
1402  if (! reload_completed)
1403    /* Note that no SUBREG is a memory operand before end of reload pass,
1404       because (SUBREG (MEM...)) forces reloading into a register.  */
1405    return MEM_P (op) && general_operand (op, mode);
1406
1407  if (mode != VOIDmode && GET_MODE (op) != mode)
1408    return 0;
1409
1410  inner = op;
1411  if (GET_CODE (inner) == SUBREG)
1412    inner = SUBREG_REG (inner);
1413
1414  return (MEM_P (inner) && general_operand (op, mode));
1415}
1416
1417/* Return 1 if OP is a valid indirect memory reference with mode MODE;
1418   that is, a memory reference whose address is a general_operand.  */
1419
1420int
1421indirect_operand (rtx op, machine_mode mode)
1422{
1423  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
1424  if (! reload_completed
1425      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1426    {
1427      int offset = SUBREG_BYTE (op);
1428      rtx inner = SUBREG_REG (op);
1429
1430      if (mode != VOIDmode && GET_MODE (op) != mode)
1431	return 0;
1432
1433      /* The only way that we can have a general_operand as the resulting
1434	 address is if OFFSET is zero and the address already is an operand
1435	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1436	 operand.  */
1437
1438      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1439	      || (GET_CODE (XEXP (inner, 0)) == PLUS
1440		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1441		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1442		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1443    }
1444
1445  return (MEM_P (op)
1446	  && memory_operand (op, mode)
1447	  && general_operand (XEXP (op, 0), Pmode));
1448}
1449
1450/* Return 1 if this is an ordered comparison operator (not including
1451   ORDERED and UNORDERED).  */
1452
1453int
1454ordered_comparison_operator (rtx op, machine_mode mode)
1455{
1456  if (mode != VOIDmode && GET_MODE (op) != mode)
1457    return false;
1458  switch (GET_CODE (op))
1459    {
1460    case EQ:
1461    case NE:
1462    case LT:
1463    case LTU:
1464    case LE:
1465    case LEU:
1466    case GT:
1467    case GTU:
1468    case GE:
1469    case GEU:
1470      return true;
1471    default:
1472      return false;
1473    }
1474}
1475
1476/* Return 1 if this is a comparison operator.  This allows the use of
1477   MATCH_OPERATOR to recognize all the branch insns.  */
1478
1479int
1480comparison_operator (rtx op, machine_mode mode)
1481{
1482  return ((mode == VOIDmode || GET_MODE (op) == mode)
1483	  && COMPARISON_P (op));
1484}
1485
1486/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
1487
1488rtx
1489extract_asm_operands (rtx body)
1490{
1491  rtx tmp;
1492  switch (GET_CODE (body))
1493    {
1494    case ASM_OPERANDS:
1495      return body;
1496
1497    case SET:
1498      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
1499      tmp = SET_SRC (body);
1500      if (GET_CODE (tmp) == ASM_OPERANDS)
1501	return tmp;
1502      break;
1503
1504    case PARALLEL:
1505      tmp = XVECEXP (body, 0, 0);
1506      if (GET_CODE (tmp) == ASM_OPERANDS)
1507	return tmp;
1508      if (GET_CODE (tmp) == SET)
1509	{
1510	  tmp = SET_SRC (tmp);
1511	  if (GET_CODE (tmp) == ASM_OPERANDS)
1512	    return tmp;
1513	}
1514      break;
1515
1516    default:
1517      break;
1518    }
1519  return NULL;
1520}
1521
1522/* If BODY is an insn body that uses ASM_OPERANDS,
1523   return the number of operands (both input and output) in the insn.
1524   Otherwise return -1.  */
1525
1526int
1527asm_noperands (const_rtx body)
1528{
1529  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1530  int n_sets = 0;
1531
1532  if (asm_op == NULL)
1533    return -1;
1534
1535  if (GET_CODE (body) == SET)
1536    n_sets = 1;
1537  else if (GET_CODE (body) == PARALLEL)
1538    {
1539      int i;
1540      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1541	{
1542	  /* Multiple output operands, or 1 output plus some clobbers:
1543	     body is
1544	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
1545	  /* Count backwards through CLOBBERs to determine number of SETs.  */
1546	  for (i = XVECLEN (body, 0); i > 0; i--)
1547	    {
1548	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1549		break;
1550	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1551		return -1;
1552	    }
1553
1554	  /* N_SETS is now number of output operands.  */
1555	  n_sets = i;
1556
1557	  /* Verify that all the SETs we have
1558	     came from a single original asm_operands insn
1559	     (so that invalid combinations are blocked).  */
1560	  for (i = 0; i < n_sets; i++)
1561	    {
1562	      rtx elt = XVECEXP (body, 0, i);
1563	      if (GET_CODE (elt) != SET)
1564		return -1;
1565	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1566		return -1;
1567	      /* If these ASM_OPERANDS rtx's came from different original insns
1568	         then they aren't allowed together.  */
1569	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1570		  != ASM_OPERANDS_INPUT_VEC (asm_op))
1571		return -1;
1572	    }
1573	}
1574      else
1575	{
1576	  /* 0 outputs, but some clobbers:
1577	     body is [(asm_operands ...) (clobber (reg ...))...].  */
1578	  /* Make sure all the other parallel things really are clobbers.  */
1579	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1580	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1581	      return -1;
1582	}
1583    }
1584
1585  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1586	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1587}
1588
1589/* Assuming BODY is an insn body that uses ASM_OPERANDS,
1590   copy its operands (both input and output) into the vector OPERANDS,
1591   the locations of the operands within the insn into the vector OPERAND_LOCS,
1592   and the constraints for the operands into CONSTRAINTS.
1593   Write the modes of the operands into MODES.
1594   Return the assembler-template.
1595
1596   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1597   we don't store that info.  */
1598
1599const char *
1600decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1601		     const char **constraints, machine_mode *modes,
1602		     location_t *loc)
1603{
1604  int nbase = 0, n, i;
1605  rtx asmop;
1606
1607  switch (GET_CODE (body))
1608    {
1609    case ASM_OPERANDS:
1610      /* Zero output asm: BODY is (asm_operands ...).  */
1611      asmop = body;
1612      break;
1613
1614    case SET:
1615      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
1616      asmop = SET_SRC (body);
1617
1618      /* The output is in the SET.
1619	 Its constraint is in the ASM_OPERANDS itself.  */
1620      if (operands)
1621	operands[0] = SET_DEST (body);
1622      if (operand_locs)
1623	operand_locs[0] = &SET_DEST (body);
1624      if (constraints)
1625	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1626      if (modes)
1627	modes[0] = GET_MODE (SET_DEST (body));
1628      nbase = 1;
1629      break;
1630
1631    case PARALLEL:
1632      {
1633	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
1634
1635	asmop = XVECEXP (body, 0, 0);
1636	if (GET_CODE (asmop) == SET)
1637	  {
1638	    asmop = SET_SRC (asmop);
1639
1640	    /* At least one output, plus some CLOBBERs.  The outputs are in
1641	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
1642	    for (i = 0; i < nparallel; i++)
1643	      {
1644		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1645		  break;		/* Past last SET */
1646		if (operands)
1647		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
1648		if (operand_locs)
1649		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1650		if (constraints)
1651		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1652		if (modes)
1653		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1654	      }
1655	    nbase = i;
1656	  }
1657	break;
1658      }
1659
1660    default:
1661      gcc_unreachable ();
1662    }
1663
1664  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1665  for (i = 0; i < n; i++)
1666    {
1667      if (operand_locs)
1668	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1669      if (operands)
1670	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1671      if (constraints)
1672	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1673      if (modes)
1674	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1675    }
1676  nbase += n;
1677
1678  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1679  for (i = 0; i < n; i++)
1680    {
1681      if (operand_locs)
1682	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1683      if (operands)
1684	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1685      if (constraints)
1686	constraints[nbase + i] = "";
1687      if (modes)
1688	modes[nbase + i] = Pmode;
1689    }
1690
1691  if (loc)
1692    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1693
1694  return ASM_OPERANDS_TEMPLATE (asmop);
1695}
1696
1697/* Parse inline assembly string STRING and determine which operands are
1698   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
1699   to true if operand I is referenced.
1700
1701   This is intended to distinguish barrier-like asms such as:
1702
1703      asm ("" : "=m" (...));
1704
1705   from real references such as:
1706
1707      asm ("sw\t$0, %0" : "=m" (...));  */
1708
1709void
1710get_referenced_operands (const char *string, bool *used,
1711			 unsigned int noperands)
1712{
1713  memset (used, 0, sizeof (bool) * noperands);
1714  const char *p = string;
1715  while (*p)
1716    switch (*p)
1717      {
1718      case '%':
1719	p += 1;
1720	/* A letter followed by a digit indicates an operand number.  */
1721	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1722	  p += 1;
1723	if (ISDIGIT (*p))
1724	  {
1725	    char *endptr;
1726	    unsigned long opnum = strtoul (p, &endptr, 10);
1727	    if (endptr != p && opnum < noperands)
1728	      used[opnum] = true;
1729	    p = endptr;
1730	  }
1731	else
1732	  p += 1;
1733	break;
1734
1735      default:
1736	p++;
1737	break;
1738      }
1739}
1740
1741/* Check if an asm_operand matches its constraints.
1742   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1743
1744int
1745asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1746{
1747  int result = 0;
1748#ifdef AUTO_INC_DEC
1749  bool incdec_ok = false;
1750#endif
1751
1752  /* Use constrain_operands after reload.  */
1753  gcc_assert (!reload_completed);
1754
1755  /* Empty constraint string is the same as "X,...,X", i.e. X for as
1756     many alternatives as required to match the other operands.  */
1757  if (*constraint == '\0')
1758    result = 1;
1759
1760  while (*constraint)
1761    {
1762      enum constraint_num cn;
1763      char c = *constraint;
1764      int len;
1765      switch (c)
1766	{
1767	case ',':
1768	  constraint++;
1769	  continue;
1770
1771	case '0': case '1': case '2': case '3': case '4':
1772	case '5': case '6': case '7': case '8': case '9':
1773	  /* If caller provided constraints pointer, look up
1774	     the matching constraint.  Otherwise, our caller should have
1775	     given us the proper matching constraint, but we can't
1776	     actually fail the check if they didn't.  Indicate that
1777	     results are inconclusive.  */
1778	  if (constraints)
1779	    {
1780	      char *end;
1781	      unsigned long match;
1782
1783	      match = strtoul (constraint, &end, 10);
1784	      if (!result)
1785		result = asm_operand_ok (op, constraints[match], NULL);
1786	      constraint = (const char *) end;
1787	    }
1788	  else
1789	    {
1790	      do
1791		constraint++;
1792	      while (ISDIGIT (*constraint));
1793	      if (! result)
1794		result = -1;
1795	    }
1796	  continue;
1797
1798	  /* The rest of the compiler assumes that reloading the address
1799	     of a MEM into a register will make it fit an 'o' constraint.
1800	     That is, if it sees a MEM operand for an 'o' constraint,
1801	     it assumes that (mem (base-reg)) will fit.
1802
1803	     That assumption fails on targets that don't have offsettable
1804	     addresses at all.  We therefore need to treat 'o' asm
1805	     constraints as a special case and only accept operands that
1806	     are already offsettable, thus proving that at least one
1807	     offsettable address exists.  */
1808	case 'o': /* offsettable */
1809	  if (offsettable_nonstrict_memref_p (op))
1810	    result = 1;
1811	  break;
1812
1813	case 'g':
1814	  if (general_operand (op, VOIDmode))
1815	    result = 1;
1816	  break;
1817
1818#ifdef AUTO_INC_DEC
1819	case '<':
1820	case '>':
1821	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1822	     to exist, excepting those that expand_call created.  Further,
1823	     on some machines which do not have generalized auto inc/dec,
1824	     an inc/dec is not a memory_operand.
1825
1826	     Match any memory and hope things are resolved after reload.  */
1827	  incdec_ok = true;
1828#endif
1829	default:
1830	  cn = lookup_constraint (constraint);
1831	  switch (get_constraint_type (cn))
1832	    {
1833	    case CT_REGISTER:
1834	      if (!result
1835		  && reg_class_for_constraint (cn) != NO_REGS
1836		  && GET_MODE (op) != BLKmode
1837		  && register_operand (op, VOIDmode))
1838		result = 1;
1839	      break;
1840
1841	    case CT_CONST_INT:
1842	      if (!result
1843		  && CONST_INT_P (op)
1844		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1845		result = 1;
1846	      break;
1847
1848	    case CT_MEMORY:
1849	      /* Every memory operand can be reloaded to fit.  */
1850	      result = result || memory_operand (op, VOIDmode);
1851	      break;
1852
1853	    case CT_ADDRESS:
1854	      /* Every address operand can be reloaded to fit.  */
1855	      result = result || address_operand (op, VOIDmode);
1856	      break;
1857
1858	    case CT_FIXED_FORM:
1859	      result = result || constraint_satisfied_p (op, cn);
1860	      break;
1861	    }
1862	  break;
1863	}
1864      len = CONSTRAINT_LEN (c, constraint);
1865      do
1866	constraint++;
1867      while (--len && *constraint);
1868      if (len)
1869	return 0;
1870    }
1871
1872#ifdef AUTO_INC_DEC
1873  /* For operands without < or > constraints reject side-effects.  */
1874  if (!incdec_ok && result && MEM_P (op))
1875    switch (GET_CODE (XEXP (op, 0)))
1876      {
1877      case PRE_INC:
1878      case POST_INC:
1879      case PRE_DEC:
1880      case POST_DEC:
1881      case PRE_MODIFY:
1882      case POST_MODIFY:
1883	return 0;
1884      default:
1885	break;
1886      }
1887#endif
1888
1889  return result;
1890}
1891
1892/* Given an rtx *P, if it is a sum containing an integer constant term,
1893   return the location (type rtx *) of the pointer to that constant term.
1894   Otherwise, return a null pointer.  */
1895
1896rtx *
1897find_constant_term_loc (rtx *p)
1898{
1899  rtx *tem;
1900  enum rtx_code code = GET_CODE (*p);
1901
1902  /* If *P IS such a constant term, P is its location.  */
1903
1904  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1905      || code == CONST)
1906    return p;
1907
1908  /* Otherwise, if not a sum, it has no constant term.  */
1909
1910  if (GET_CODE (*p) != PLUS)
1911    return 0;
1912
1913  /* If one of the summands is constant, return its location.  */
1914
1915  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1916      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1917    return p;
1918
1919  /* Otherwise, check each summand for containing a constant term.  */
1920
1921  if (XEXP (*p, 0) != 0)
1922    {
1923      tem = find_constant_term_loc (&XEXP (*p, 0));
1924      if (tem != 0)
1925	return tem;
1926    }
1927
1928  if (XEXP (*p, 1) != 0)
1929    {
1930      tem = find_constant_term_loc (&XEXP (*p, 1));
1931      if (tem != 0)
1932	return tem;
1933    }
1934
1935  return 0;
1936}
1937
1938/* Return 1 if OP is a memory reference
1939   whose address contains no side effects
1940   and remains valid after the addition
1941   of a positive integer less than the
1942   size of the object being referenced.
1943
1944   We assume that the original address is valid and do not check it.
1945
1946   This uses strict_memory_address_p as a subroutine, so
1947   don't use it before reload.  */
1948
1949int
1950offsettable_memref_p (rtx op)
1951{
1952  return ((MEM_P (op))
1953	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1954					       MEM_ADDR_SPACE (op)));
1955}
1956
1957/* Similar, but don't require a strictly valid mem ref:
1958   consider pseudo-regs valid as index or base regs.  */
1959
1960int
1961offsettable_nonstrict_memref_p (rtx op)
1962{
1963  return ((MEM_P (op))
1964	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1965					       MEM_ADDR_SPACE (op)));
1966}
1967
1968/* Return 1 if Y is a memory address which contains no side effects
1969   and would remain valid for address space AS after the addition of
1970   a positive integer less than the size of that mode.
1971
1972   We assume that the original address is valid and do not check it.
1973   We do check that it is valid for narrower modes.
1974
1975   If STRICTP is nonzero, we require a strictly valid address,
1976   for the sake of use in reload.c.  */
1977
1978int
1979offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1980				  addr_space_t as)
1981{
1982  enum rtx_code ycode = GET_CODE (y);
1983  rtx z;
1984  rtx y1 = y;
1985  rtx *y2;
1986  int (*addressp) (machine_mode, rtx, addr_space_t) =
1987    (strictp ? strict_memory_address_addr_space_p
1988	     : memory_address_addr_space_p);
1989  unsigned int mode_sz = GET_MODE_SIZE (mode);
1990
1991  if (CONSTANT_ADDRESS_P (y))
1992    return 1;
1993
1994  /* Adjusting an offsettable address involves changing to a narrower mode.
1995     Make sure that's OK.  */
1996
1997  if (mode_dependent_address_p (y, as))
1998    return 0;
1999
2000  machine_mode address_mode = GET_MODE (y);
2001  if (address_mode == VOIDmode)
2002    address_mode = targetm.addr_space.address_mode (as);
2003#ifdef POINTERS_EXTEND_UNSIGNED
2004  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2005#endif
2006
2007  /* ??? How much offset does an offsettable BLKmode reference need?
2008     Clearly that depends on the situation in which it's being used.
2009     However, the current situation in which we test 0xffffffff is
2010     less than ideal.  Caveat user.  */
2011  if (mode_sz == 0)
2012    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2013
2014  /* If the expression contains a constant term,
2015     see if it remains valid when max possible offset is added.  */
2016
2017  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2018    {
2019      int good;
2020
2021      y1 = *y2;
2022      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2023      /* Use QImode because an odd displacement may be automatically invalid
2024	 for any wider mode.  But it should be valid for a single byte.  */
2025      good = (*addressp) (QImode, y, as);
2026
2027      /* In any case, restore old contents of memory.  */
2028      *y2 = y1;
2029      return good;
2030    }
2031
2032  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2033    return 0;
2034
2035  /* The offset added here is chosen as the maximum offset that
2036     any instruction could need to add when operating on something
2037     of the specified mode.  We assume that if Y and Y+c are
2038     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
2039     go inside a LO_SUM here, so we do so as well.  */
2040  if (GET_CODE (y) == LO_SUM
2041      && mode != BLKmode
2042      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2043    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2044			plus_constant (address_mode, XEXP (y, 1),
2045				       mode_sz - 1));
2046#ifdef POINTERS_EXTEND_UNSIGNED
2047  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
2048  else if (POINTERS_EXTEND_UNSIGNED > 0
2049	   && GET_CODE (y) == ZERO_EXTEND
2050	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
2051    z = gen_rtx_ZERO_EXTEND (address_mode,
2052			     plus_constant (pointer_mode, XEXP (y, 0),
2053					    mode_sz - 1));
2054#endif
2055  else
2056    z = plus_constant (address_mode, y, mode_sz - 1);
2057
2058  /* Use QImode because an odd displacement may be automatically invalid
2059     for any wider mode.  But it should be valid for a single byte.  */
2060  return (*addressp) (QImode, z, as);
2061}
2062
2063/* Return 1 if ADDR is an address-expression whose effect depends
2064   on the mode of the memory reference it is used in.
2065
2066   ADDRSPACE is the address space associated with the address.
2067
2068   Autoincrement addressing is a typical example of mode-dependence
2069   because the amount of the increment depends on the mode.  */
2070
2071bool
2072mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2073{
2074  /* Auto-increment addressing with anything other than post_modify
2075     or pre_modify always introduces a mode dependency.  Catch such
2076     cases now instead of deferring to the target.  */
2077  if (GET_CODE (addr) == PRE_INC
2078      || GET_CODE (addr) == POST_INC
2079      || GET_CODE (addr) == PRE_DEC
2080      || GET_CODE (addr) == POST_DEC)
2081    return true;
2082
2083  return targetm.mode_dependent_address_p (addr, addrspace);
2084}
2085
2086/* Return true if boolean attribute ATTR is supported.  */
2087
2088static bool
2089have_bool_attr (bool_attr attr)
2090{
2091  switch (attr)
2092    {
2093    case BA_ENABLED:
2094      return HAVE_ATTR_enabled;
2095    case BA_PREFERRED_FOR_SIZE:
2096      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2097    case BA_PREFERRED_FOR_SPEED:
2098      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2099    }
2100  gcc_unreachable ();
2101}
2102
2103/* Return the value of ATTR for instruction INSN.  */
2104
2105static bool
2106get_bool_attr (rtx_insn *insn, bool_attr attr)
2107{
2108  switch (attr)
2109    {
2110    case BA_ENABLED:
2111      return get_attr_enabled (insn);
2112    case BA_PREFERRED_FOR_SIZE:
2113      return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2114    case BA_PREFERRED_FOR_SPEED:
2115      return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2116    }
2117  gcc_unreachable ();
2118}
2119
2120/* Like get_bool_attr_mask, but don't use the cache.  */
2121
2122static alternative_mask
2123get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2124{
2125  /* Temporarily install enough information for get_attr_<foo> to assume
2126     that the insn operands are already cached.  As above, the attribute
2127     mustn't depend on the values of operands, so we don't provide their
2128     real values here.  */
2129  rtx old_insn = recog_data.insn;
2130  int old_alternative = which_alternative;
2131
2132  recog_data.insn = insn;
2133  alternative_mask mask = ALL_ALTERNATIVES;
2134  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2135  for (int i = 0; i < n_alternatives; i++)
2136    {
2137      which_alternative = i;
2138      if (!get_bool_attr (insn, attr))
2139	mask &= ~ALTERNATIVE_BIT (i);
2140    }
2141
2142  recog_data.insn = old_insn;
2143  which_alternative = old_alternative;
2144  return mask;
2145}
2146
2147/* Return the mask of operand alternatives that are allowed for INSN
2148   by boolean attribute ATTR.  This mask depends only on INSN and on
2149   the current target; it does not depend on things like the values of
2150   operands.  */
2151
2152static alternative_mask
2153get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2154{
2155  /* Quick exit for asms and for targets that don't use these attributes.  */
2156  int code = INSN_CODE (insn);
2157  if (code < 0 || !have_bool_attr (attr))
2158    return ALL_ALTERNATIVES;
2159
2160  /* Calling get_attr_<foo> can be expensive, so cache the mask
2161     for speed.  */
2162  if (!this_target_recog->x_bool_attr_masks[code][attr])
2163    this_target_recog->x_bool_attr_masks[code][attr]
2164      = get_bool_attr_mask_uncached (insn, attr);
2165  return this_target_recog->x_bool_attr_masks[code][attr];
2166}
2167
2168/* Return the set of alternatives of INSN that are allowed by the current
2169   target.  */
2170
2171alternative_mask
2172get_enabled_alternatives (rtx_insn *insn)
2173{
2174  return get_bool_attr_mask (insn, BA_ENABLED);
2175}
2176
2177/* Return the set of alternatives of INSN that are allowed by the current
2178   target and are preferred for the current size/speed optimization
2179   choice.  */
2180
2181alternative_mask
2182get_preferred_alternatives (rtx_insn *insn)
2183{
2184  if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2185    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2186  else
2187    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2188}
2189
2190/* Return the set of alternatives of INSN that are allowed by the current
2191   target and are preferred for the size/speed optimization choice
2192   associated with BB.  Passing a separate BB is useful if INSN has not
2193   been emitted yet or if we are considering moving it to a different
2194   block.  */
2195
2196alternative_mask
2197get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2198{
2199  if (optimize_bb_for_speed_p (bb))
2200    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2201  else
2202    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2203}
2204
2205/* Assert that the cached boolean attributes for INSN are still accurate.
2206   The backend is required to define these attributes in a way that only
2207   depends on the current target (rather than operands, compiler phase,
2208   etc.).  */
2209
2210bool
2211check_bool_attrs (rtx_insn *insn)
2212{
2213  int code = INSN_CODE (insn);
2214  if (code >= 0)
2215    for (int i = 0; i <= BA_LAST; ++i)
2216      {
2217	enum bool_attr attr = (enum bool_attr) i;
2218	if (this_target_recog->x_bool_attr_masks[code][attr])
2219	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2220		      == get_bool_attr_mask_uncached (insn, attr));
2221      }
2222  return true;
2223}
2224
2225/* Like extract_insn, but save insn extracted and don't extract again, when
2226   called again for the same insn expecting that recog_data still contain the
2227   valid information.  This is used primary by gen_attr infrastructure that
2228   often does extract insn again and again.  */
2229void
2230extract_insn_cached (rtx_insn *insn)
2231{
2232  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2233    return;
2234  extract_insn (insn);
2235  recog_data.insn = insn;
2236}
2237
2238/* Do uncached extract_insn, constrain_operands and complain about failures.
2239   This should be used when extracting a pre-existing constrained instruction
2240   if the caller wants to know which alternative was chosen.  */
2241void
2242extract_constrain_insn (rtx_insn *insn)
2243{
2244  extract_insn (insn);
2245  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2246    fatal_insn_not_found (insn);
2247}
2248
2249/* Do cached extract_insn, constrain_operands and complain about failures.
2250   Used by insn_attrtab.  */
2251void
2252extract_constrain_insn_cached (rtx_insn *insn)
2253{
2254  extract_insn_cached (insn);
2255  if (which_alternative == -1
2256      && !constrain_operands (reload_completed,
2257			      get_enabled_alternatives (insn)))
2258    fatal_insn_not_found (insn);
2259}
2260
2261/* Do cached constrain_operands on INSN and complain about failures.  */
2262int
2263constrain_operands_cached (rtx_insn *insn, int strict)
2264{
2265  if (which_alternative == -1)
2266    return constrain_operands (strict, get_enabled_alternatives (insn));
2267  else
2268    return 1;
2269}
2270
2271/* Analyze INSN and fill in recog_data.  */
2272
2273void
2274extract_insn (rtx_insn *insn)
2275{
2276  int i;
2277  int icode;
2278  int noperands;
2279  rtx body = PATTERN (insn);
2280
2281  recog_data.n_operands = 0;
2282  recog_data.n_alternatives = 0;
2283  recog_data.n_dups = 0;
2284  recog_data.is_asm = false;
2285
2286  switch (GET_CODE (body))
2287    {
2288    case USE:
2289    case CLOBBER:
2290    case ASM_INPUT:
2291    case ADDR_VEC:
2292    case ADDR_DIFF_VEC:
2293    case VAR_LOCATION:
2294      return;
2295
2296    case SET:
2297      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2298	goto asm_insn;
2299      else
2300	goto normal_insn;
2301    case PARALLEL:
2302      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2303	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2304	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2305	goto asm_insn;
2306      else
2307	goto normal_insn;
2308    case ASM_OPERANDS:
2309    asm_insn:
2310      recog_data.n_operands = noperands = asm_noperands (body);
2311      if (noperands >= 0)
2312	{
2313	  /* This insn is an `asm' with operands.  */
2314
2315	  /* expand_asm_operands makes sure there aren't too many operands.  */
2316	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2317
2318	  /* Now get the operand values and constraints out of the insn.  */
2319	  decode_asm_operands (body, recog_data.operand,
2320			       recog_data.operand_loc,
2321			       recog_data.constraints,
2322			       recog_data.operand_mode, NULL);
2323	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2324	  if (noperands > 0)
2325	    {
2326	      const char *p =  recog_data.constraints[0];
2327	      recog_data.n_alternatives = 1;
2328	      while (*p)
2329		recog_data.n_alternatives += (*p++ == ',');
2330	    }
2331	  recog_data.is_asm = true;
2332	  break;
2333	}
2334      fatal_insn_not_found (insn);
2335
2336    default:
2337    normal_insn:
2338      /* Ordinary insn: recognize it, get the operands via insn_extract
2339	 and get the constraints.  */
2340
2341      icode = recog_memoized (insn);
2342      if (icode < 0)
2343	fatal_insn_not_found (insn);
2344
2345      recog_data.n_operands = noperands = insn_data[icode].n_operands;
2346      recog_data.n_alternatives = insn_data[icode].n_alternatives;
2347      recog_data.n_dups = insn_data[icode].n_dups;
2348
2349      insn_extract (insn);
2350
2351      for (i = 0; i < noperands; i++)
2352	{
2353	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2354	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2355	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2356	  /* VOIDmode match_operands gets mode from their real operand.  */
2357	  if (recog_data.operand_mode[i] == VOIDmode)
2358	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2359	}
2360    }
2361  for (i = 0; i < noperands; i++)
2362    recog_data.operand_type[i]
2363      = (recog_data.constraints[i][0] == '=' ? OP_OUT
2364	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2365	 : OP_IN);
2366
2367  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2368
2369  recog_data.insn = NULL;
2370  which_alternative = -1;
2371}
2372
2373/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2374   N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2375   OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2376   has N_OPERANDS entries.  */
2377
2378void
2379preprocess_constraints (int n_operands, int n_alternatives,
2380			const char **constraints,
2381			operand_alternative *op_alt_base)
2382{
2383  for (int i = 0; i < n_operands; i++)
2384    {
2385      int j;
2386      struct operand_alternative *op_alt;
2387      const char *p = constraints[i];
2388
2389      op_alt = op_alt_base;
2390
2391      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2392	{
2393	  op_alt[i].cl = NO_REGS;
2394	  op_alt[i].constraint = p;
2395	  op_alt[i].matches = -1;
2396	  op_alt[i].matched = -1;
2397
2398	  if (*p == '\0' || *p == ',')
2399	    {
2400	      op_alt[i].anything_ok = 1;
2401	      continue;
2402	    }
2403
2404	  for (;;)
2405	    {
2406	      char c = *p;
2407	      if (c == '#')
2408		do
2409		  c = *++p;
2410		while (c != ',' && c != '\0');
2411	      if (c == ',' || c == '\0')
2412		{
2413		  p++;
2414		  break;
2415		}
2416
2417	      switch (c)
2418		{
2419		case '?':
2420		  op_alt[i].reject += 6;
2421		  break;
2422		case '!':
2423		  op_alt[i].reject += 600;
2424		  break;
2425		case '&':
2426		  op_alt[i].earlyclobber = 1;
2427		  break;
2428
2429		case '0': case '1': case '2': case '3': case '4':
2430		case '5': case '6': case '7': case '8': case '9':
2431		  {
2432		    char *end;
2433		    op_alt[i].matches = strtoul (p, &end, 10);
2434		    op_alt[op_alt[i].matches].matched = i;
2435		    p = end;
2436		  }
2437		  continue;
2438
2439		case 'X':
2440		  op_alt[i].anything_ok = 1;
2441		  break;
2442
2443		case 'g':
2444		  op_alt[i].cl =
2445		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2446		  break;
2447
2448		default:
2449		  enum constraint_num cn = lookup_constraint (p);
2450		  enum reg_class cl;
2451		  switch (get_constraint_type (cn))
2452		    {
2453		    case CT_REGISTER:
2454		      cl = reg_class_for_constraint (cn);
2455		      if (cl != NO_REGS)
2456			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2457		      break;
2458
2459		    case CT_CONST_INT:
2460		      break;
2461
2462		    case CT_MEMORY:
2463		      op_alt[i].memory_ok = 1;
2464		      break;
2465
2466		    case CT_ADDRESS:
2467		      op_alt[i].is_address = 1;
2468		      op_alt[i].cl
2469			= (reg_class_subunion
2470			   [(int) op_alt[i].cl]
2471			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2472						  ADDRESS, SCRATCH)]);
2473		      break;
2474
2475		    case CT_FIXED_FORM:
2476		      break;
2477		    }
2478		  break;
2479		}
2480	      p += CONSTRAINT_LEN (c, p);
2481	    }
2482	}
2483    }
2484}
2485
2486/* Return an array of operand_alternative instructions for
2487   instruction ICODE.  */
2488
2489const operand_alternative *
2490preprocess_insn_constraints (int icode)
2491{
2492  gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2493  if (this_target_recog->x_op_alt[icode])
2494    return this_target_recog->x_op_alt[icode];
2495
2496  int n_operands = insn_data[icode].n_operands;
2497  if (n_operands == 0)
2498    return 0;
2499  /* Always provide at least one alternative so that which_op_alt ()
2500     works correctly.  If the instruction has 0 alternatives (i.e. all
2501     constraint strings are empty) then each operand in this alternative
2502     will have anything_ok set.  */
2503  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2504  int n_entries = n_operands * n_alternatives;
2505
2506  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2507  const char **constraints = XALLOCAVEC (const char *, n_operands);
2508
2509  for (int i = 0; i < n_operands; ++i)
2510    constraints[i] = insn_data[icode].operand[i].constraint;
2511  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2512
2513  this_target_recog->x_op_alt[icode] = op_alt;
2514  return op_alt;
2515}
2516
2517/* After calling extract_insn, you can use this function to extract some
2518   information from the constraint strings into a more usable form.
2519   The collected data is stored in recog_op_alt.  */
2520
2521void
2522preprocess_constraints (rtx insn)
2523{
2524  int icode = INSN_CODE (insn);
2525  if (icode >= 0)
2526    recog_op_alt = preprocess_insn_constraints (icode);
2527  else
2528    {
2529      int n_operands = recog_data.n_operands;
2530      int n_alternatives = recog_data.n_alternatives;
2531      int n_entries = n_operands * n_alternatives;
2532      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2533      preprocess_constraints (n_operands, n_alternatives,
2534			      recog_data.constraints, asm_op_alt);
2535      recog_op_alt = asm_op_alt;
2536    }
2537}
2538
2539/* Check the operands of an insn against the insn's operand constraints
2540   and return 1 if they match any of the alternatives in ALTERNATIVES.
2541
2542   The information about the insn's operands, constraints, operand modes
2543   etc. is obtained from the global variables set up by extract_insn.
2544
2545   WHICH_ALTERNATIVE is set to a number which indicates which
2546   alternative of constraints was matched: 0 for the first alternative,
2547   1 for the next, etc.
2548
2549   In addition, when two operands are required to match
2550   and it happens that the output operand is (reg) while the
2551   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2552   make the output operand look like the input.
2553   This is because the output operand is the one the template will print.
2554
2555   This is used in final, just before printing the assembler code and by
2556   the routines that determine an insn's attribute.
2557
2558   If STRICT is a positive nonzero value, it means that we have been
2559   called after reload has been completed.  In that case, we must
2560   do all checks strictly.  If it is zero, it means that we have been called
2561   before reload has completed.  In that case, we first try to see if we can
2562   find an alternative that matches strictly.  If not, we try again, this
2563   time assuming that reload will fix up the insn.  This provides a "best
2564   guess" for the alternative and is used to compute attributes of insns prior
2565   to reload.  A negative value of STRICT is used for this internal call.  */
2566
2567struct funny_match
2568{
2569  int this_op, other;
2570};
2571
2572int
2573constrain_operands (int strict, alternative_mask alternatives)
2574{
2575  const char *constraints[MAX_RECOG_OPERANDS];
2576  int matching_operands[MAX_RECOG_OPERANDS];
2577  int earlyclobber[MAX_RECOG_OPERANDS];
2578  int c;
2579
2580  struct funny_match funny_match[MAX_RECOG_OPERANDS];
2581  int funny_match_index;
2582
2583  which_alternative = 0;
2584  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2585    return 1;
2586
2587  for (c = 0; c < recog_data.n_operands; c++)
2588    {
2589      constraints[c] = recog_data.constraints[c];
2590      matching_operands[c] = -1;
2591    }
2592
2593  do
2594    {
2595      int seen_earlyclobber_at = -1;
2596      int opno;
2597      int lose = 0;
2598      funny_match_index = 0;
2599
2600      if (!TEST_BIT (alternatives, which_alternative))
2601	{
2602	  int i;
2603
2604	  for (i = 0; i < recog_data.n_operands; i++)
2605	    constraints[i] = skip_alternative (constraints[i]);
2606
2607	  which_alternative++;
2608	  continue;
2609	}
2610
2611      for (opno = 0; opno < recog_data.n_operands; opno++)
2612	{
2613	  rtx op = recog_data.operand[opno];
2614	  machine_mode mode = GET_MODE (op);
2615	  const char *p = constraints[opno];
2616	  int offset = 0;
2617	  int win = 0;
2618	  int val;
2619	  int len;
2620
2621	  earlyclobber[opno] = 0;
2622
2623	  /* A unary operator may be accepted by the predicate, but it
2624	     is irrelevant for matching constraints.  */
2625	  if (UNARY_P (op))
2626	    op = XEXP (op, 0);
2627
2628	  if (GET_CODE (op) == SUBREG)
2629	    {
2630	      if (REG_P (SUBREG_REG (op))
2631		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2632		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2633					      GET_MODE (SUBREG_REG (op)),
2634					      SUBREG_BYTE (op),
2635					      GET_MODE (op));
2636	      op = SUBREG_REG (op);
2637	    }
2638
2639	  /* An empty constraint or empty alternative
2640	     allows anything which matched the pattern.  */
2641	  if (*p == 0 || *p == ',')
2642	    win = 1;
2643
2644	  do
2645	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2646	      {
2647	      case '\0':
2648		len = 0;
2649		break;
2650	      case ',':
2651		c = '\0';
2652		break;
2653
2654	      case '#':
2655		/* Ignore rest of this alternative as far as
2656		   constraint checking is concerned.  */
2657		do
2658		  p++;
2659		while (*p && *p != ',');
2660		len = 0;
2661		break;
2662
2663	      case '&':
2664		earlyclobber[opno] = 1;
2665		if (seen_earlyclobber_at < 0)
2666		  seen_earlyclobber_at = opno;
2667		break;
2668
2669	      case '0':  case '1':  case '2':  case '3':  case '4':
2670	      case '5':  case '6':  case '7':  case '8':  case '9':
2671		{
2672		  /* This operand must be the same as a previous one.
2673		     This kind of constraint is used for instructions such
2674		     as add when they take only two operands.
2675
2676		     Note that the lower-numbered operand is passed first.
2677
2678		     If we are not testing strictly, assume that this
2679		     constraint will be satisfied.  */
2680
2681		  char *end;
2682		  int match;
2683
2684		  match = strtoul (p, &end, 10);
2685		  p = end;
2686
2687		  if (strict < 0)
2688		    val = 1;
2689		  else
2690		    {
2691		      rtx op1 = recog_data.operand[match];
2692		      rtx op2 = recog_data.operand[opno];
2693
2694		      /* A unary operator may be accepted by the predicate,
2695			 but it is irrelevant for matching constraints.  */
2696		      if (UNARY_P (op1))
2697			op1 = XEXP (op1, 0);
2698		      if (UNARY_P (op2))
2699			op2 = XEXP (op2, 0);
2700
2701		      val = operands_match_p (op1, op2);
2702		    }
2703
2704		  matching_operands[opno] = match;
2705		  matching_operands[match] = opno;
2706
2707		  if (val != 0)
2708		    win = 1;
2709
2710		  /* If output is *x and input is *--x, arrange later
2711		     to change the output to *--x as well, since the
2712		     output op is the one that will be printed.  */
2713		  if (val == 2 && strict > 0)
2714		    {
2715		      funny_match[funny_match_index].this_op = opno;
2716		      funny_match[funny_match_index++].other = match;
2717		    }
2718		}
2719		len = 0;
2720		break;
2721
2722	      case 'p':
2723		/* p is used for address_operands.  When we are called by
2724		   gen_reload, no one will have checked that the address is
2725		   strictly valid, i.e., that all pseudos requiring hard regs
2726		   have gotten them.  */
2727		if (strict <= 0
2728		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2729						 op)))
2730		  win = 1;
2731		break;
2732
2733		/* No need to check general_operand again;
2734		   it was done in insn-recog.c.  Well, except that reload
2735		   doesn't check the validity of its replacements, but
2736		   that should only matter when there's a bug.  */
2737	      case 'g':
2738		/* Anything goes unless it is a REG and really has a hard reg
2739		   but the hard reg is not in the class GENERAL_REGS.  */
2740		if (REG_P (op))
2741		  {
2742		    if (strict < 0
2743			|| GENERAL_REGS == ALL_REGS
2744			|| (reload_in_progress
2745			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2746			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2747		      win = 1;
2748		  }
2749		else if (strict < 0 || general_operand (op, mode))
2750		  win = 1;
2751		break;
2752
2753	      default:
2754		{
2755		  enum constraint_num cn = lookup_constraint (p);
2756		  enum reg_class cl = reg_class_for_constraint (cn);
2757		  if (cl != NO_REGS)
2758		    {
2759		      if (strict < 0
2760			  || (strict == 0
2761			      && REG_P (op)
2762			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2763			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2764			  || (REG_P (op)
2765			      && reg_fits_class_p (op, cl, offset, mode)))
2766		        win = 1;
2767		    }
2768
2769		  else if (constraint_satisfied_p (op, cn))
2770		    win = 1;
2771
2772		  else if (insn_extra_memory_constraint (cn)
2773			   /* Every memory operand can be reloaded to fit.  */
2774			   && ((strict < 0 && MEM_P (op))
2775			       /* Before reload, accept what reload can turn
2776				  into a mem.  */
2777			       || (strict < 0 && CONSTANT_P (op))
2778			       /* Before reload, accept a pseudo,
2779				  since LRA can turn it into a mem.  */
2780			       || (strict < 0 && targetm.lra_p () && REG_P (op)
2781				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2782			       /* During reload, accept a pseudo  */
2783			       || (reload_in_progress && REG_P (op)
2784				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2785		    win = 1;
2786		  else if (insn_extra_address_constraint (cn)
2787			   /* Every address operand can be reloaded to fit.  */
2788			   && strict < 0)
2789		    win = 1;
2790		  /* Cater to architectures like IA-64 that define extra memory
2791		     constraints without using define_memory_constraint.  */
2792		  else if (reload_in_progress
2793			   && REG_P (op)
2794			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
2795			   && reg_renumber[REGNO (op)] < 0
2796			   && reg_equiv_mem (REGNO (op)) != 0
2797			   && constraint_satisfied_p
2798			      (reg_equiv_mem (REGNO (op)), cn))
2799		    win = 1;
2800		  break;
2801		}
2802	      }
2803	  while (p += len, c);
2804
2805	  constraints[opno] = p;
2806	  /* If this operand did not win somehow,
2807	     this alternative loses.  */
2808	  if (! win)
2809	    lose = 1;
2810	}
2811      /* This alternative won; the operands are ok.
2812	 Change whichever operands this alternative says to change.  */
2813      if (! lose)
2814	{
2815	  int opno, eopno;
2816
2817	  /* See if any earlyclobber operand conflicts with some other
2818	     operand.  */
2819
2820	  if (strict > 0  && seen_earlyclobber_at >= 0)
2821	    for (eopno = seen_earlyclobber_at;
2822		 eopno < recog_data.n_operands;
2823		 eopno++)
2824	      /* Ignore earlyclobber operands now in memory,
2825		 because we would often report failure when we have
2826		 two memory operands, one of which was formerly a REG.  */
2827	      if (earlyclobber[eopno]
2828		  && REG_P (recog_data.operand[eopno]))
2829		for (opno = 0; opno < recog_data.n_operands; opno++)
2830		  if ((MEM_P (recog_data.operand[opno])
2831		       || recog_data.operand_type[opno] != OP_OUT)
2832		      && opno != eopno
2833		      /* Ignore things like match_operator operands.  */
2834		      && *recog_data.constraints[opno] != 0
2835		      && ! (matching_operands[opno] == eopno
2836			    && operands_match_p (recog_data.operand[opno],
2837						 recog_data.operand[eopno]))
2838		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2839						   recog_data.operand[eopno]))
2840		    lose = 1;
2841
2842	  if (! lose)
2843	    {
2844	      while (--funny_match_index >= 0)
2845		{
2846		  recog_data.operand[funny_match[funny_match_index].other]
2847		    = recog_data.operand[funny_match[funny_match_index].this_op];
2848		}
2849
2850#ifdef AUTO_INC_DEC
2851	      /* For operands without < or > constraints reject side-effects.  */
2852	      if (recog_data.is_asm)
2853		{
2854		  for (opno = 0; opno < recog_data.n_operands; opno++)
2855		    if (MEM_P (recog_data.operand[opno]))
2856		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2857			{
2858			case PRE_INC:
2859			case POST_INC:
2860			case PRE_DEC:
2861			case POST_DEC:
2862			case PRE_MODIFY:
2863			case POST_MODIFY:
2864			  if (strchr (recog_data.constraints[opno], '<') == NULL
2865			      && strchr (recog_data.constraints[opno], '>')
2866				 == NULL)
2867			    return 0;
2868			  break;
2869			default:
2870			  break;
2871			}
2872		}
2873#endif
2874	      return 1;
2875	    }
2876	}
2877
2878      which_alternative++;
2879    }
2880  while (which_alternative < recog_data.n_alternatives);
2881
2882  which_alternative = -1;
2883  /* If we are about to reject this, but we are not to test strictly,
2884     try a very loose test.  Only return failure if it fails also.  */
2885  if (strict == 0)
2886    return constrain_operands (-1, alternatives);
2887  else
2888    return 0;
2889}
2890
2891/* Return true iff OPERAND (assumed to be a REG rtx)
2892   is a hard reg in class CLASS when its regno is offset by OFFSET
2893   and changed to mode MODE.
2894   If REG occupies multiple hard regs, all of them must be in CLASS.  */
2895
2896bool
2897reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2898		  machine_mode mode)
2899{
2900  unsigned int regno = REGNO (operand);
2901
2902  if (cl == NO_REGS)
2903    return false;
2904
2905  /* Regno must not be a pseudo register.  Offset may be negative.  */
2906  return (HARD_REGISTER_NUM_P (regno)
2907	  && HARD_REGISTER_NUM_P (regno + offset)
2908	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2909				regno + offset));
2910}
2911
2912/* Split single instruction.  Helper function for split_all_insns and
2913   split_all_insns_noflow.  Return last insn in the sequence if successful,
2914   or NULL if unsuccessful.  */
2915
2916static rtx
2917split_insn (rtx_insn *insn)
2918{
2919  /* Split insns here to get max fine-grain parallelism.  */
2920  rtx_insn *first = PREV_INSN (insn);
2921  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2922  rtx insn_set, last_set, note;
2923
2924  if (last == insn)
2925    return NULL_RTX;
2926
2927  /* If the original instruction was a single set that was known to be
2928     equivalent to a constant, see if we can say the same about the last
2929     instruction in the split sequence.  The two instructions must set
2930     the same destination.  */
2931  insn_set = single_set (insn);
2932  if (insn_set)
2933    {
2934      last_set = single_set (last);
2935      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2936	{
2937	  note = find_reg_equal_equiv_note (insn);
2938	  if (note && CONSTANT_P (XEXP (note, 0)))
2939	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2940	  else if (CONSTANT_P (SET_SRC (insn_set)))
2941	    set_unique_reg_note (last, REG_EQUAL,
2942				 copy_rtx (SET_SRC (insn_set)));
2943	}
2944    }
2945
2946  /* try_split returns the NOTE that INSN became.  */
2947  SET_INSN_DELETED (insn);
2948
2949  /* ??? Coddle to md files that generate subregs in post-reload
2950     splitters instead of computing the proper hard register.  */
2951  if (reload_completed && first != last)
2952    {
2953      first = NEXT_INSN (first);
2954      for (;;)
2955	{
2956	  if (INSN_P (first))
2957	    cleanup_subreg_operands (first);
2958	  if (first == last)
2959	    break;
2960	  first = NEXT_INSN (first);
2961	}
2962    }
2963
2964  return last;
2965}
2966
2967/* Split all insns in the function.  If UPD_LIFE, update life info after.  */
2968
2969void
2970split_all_insns (void)
2971{
2972  sbitmap blocks;
2973  bool changed;
2974  basic_block bb;
2975
2976  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2977  bitmap_clear (blocks);
2978  changed = false;
2979
2980  FOR_EACH_BB_REVERSE_FN (bb, cfun)
2981    {
2982      rtx_insn *insn, *next;
2983      bool finish = false;
2984
2985      rtl_profile_for_bb (bb);
2986      for (insn = BB_HEAD (bb); !finish ; insn = next)
2987	{
2988	  /* Can't use `next_real_insn' because that might go across
2989	     CODE_LABELS and short-out basic blocks.  */
2990	  next = NEXT_INSN (insn);
2991	  finish = (insn == BB_END (bb));
2992	  if (INSN_P (insn))
2993	    {
2994	      rtx set = single_set (insn);
2995
2996	      /* Don't split no-op move insns.  These should silently
2997		 disappear later in final.  Splitting such insns would
2998		 break the code that handles LIBCALL blocks.  */
2999	      if (set && set_noop_p (set))
3000		{
3001		  /* Nops get in the way while scheduling, so delete them
3002		     now if register allocation has already been done.  It
3003		     is too risky to try to do this before register
3004		     allocation, and there are unlikely to be very many
3005		     nops then anyways.  */
3006		  if (reload_completed)
3007		      delete_insn_and_edges (insn);
3008		}
3009	      else
3010		{
3011		  if (split_insn (insn))
3012		    {
3013		      bitmap_set_bit (blocks, bb->index);
3014		      changed = true;
3015		    }
3016		}
3017	    }
3018	}
3019    }
3020
3021  default_rtl_profile ();
3022  if (changed)
3023    find_many_sub_basic_blocks (blocks);
3024
3025#ifdef ENABLE_CHECKING
3026  verify_flow_info ();
3027#endif
3028
3029  sbitmap_free (blocks);
3030}
3031
3032/* Same as split_all_insns, but do not expect CFG to be available.
3033   Used by machine dependent reorg passes.  */
3034
3035unsigned int
3036split_all_insns_noflow (void)
3037{
3038  rtx_insn *next, *insn;
3039
3040  for (insn = get_insns (); insn; insn = next)
3041    {
3042      next = NEXT_INSN (insn);
3043      if (INSN_P (insn))
3044	{
3045	  /* Don't split no-op move insns.  These should silently
3046	     disappear later in final.  Splitting such insns would
3047	     break the code that handles LIBCALL blocks.  */
3048	  rtx set = single_set (insn);
3049	  if (set && set_noop_p (set))
3050	    {
3051	      /* Nops get in the way while scheduling, so delete them
3052		 now if register allocation has already been done.  It
3053		 is too risky to try to do this before register
3054		 allocation, and there are unlikely to be very many
3055		 nops then anyways.
3056
3057		 ??? Should we use delete_insn when the CFG isn't valid?  */
3058	      if (reload_completed)
3059		delete_insn_and_edges (insn);
3060	    }
3061	  else
3062	    split_insn (insn);
3063	}
3064    }
3065  return 0;
3066}
3067
3068#ifdef HAVE_peephole2
3069struct peep2_insn_data
3070{
3071  rtx insn;
3072  regset live_before;
3073};
3074
3075static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3076static int peep2_current;
3077
3078static bool peep2_do_rebuild_jump_labels;
3079static bool peep2_do_cleanup_cfg;
3080
3081/* The number of instructions available to match a peep2.  */
3082int peep2_current_count;
3083
3084/* A non-insn marker indicating the last insn of the block.
3085   The live_before regset for this element is correct, indicating
3086   DF_LIVE_OUT for the block.  */
3087#define PEEP2_EOB	pc_rtx
3088
3089/* Wrap N to fit into the peep2_insn_data buffer.  */
3090
3091static int
3092peep2_buf_position (int n)
3093{
3094  if (n >= MAX_INSNS_PER_PEEP2 + 1)
3095    n -= MAX_INSNS_PER_PEEP2 + 1;
3096  return n;
3097}
3098
3099/* Return the Nth non-note insn after `current', or return NULL_RTX if it
3100   does not exist.  Used by the recognizer to find the next insn to match
3101   in a multi-insn pattern.  */
3102
3103rtx
3104peep2_next_insn (int n)
3105{
3106  gcc_assert (n <= peep2_current_count);
3107
3108  n = peep2_buf_position (peep2_current + n);
3109
3110  return peep2_insn_data[n].insn;
3111}
3112
3113/* Return true if REGNO is dead before the Nth non-note insn
3114   after `current'.  */
3115
3116int
3117peep2_regno_dead_p (int ofs, int regno)
3118{
3119  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3120
3121  ofs = peep2_buf_position (peep2_current + ofs);
3122
3123  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3124
3125  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3126}
3127
3128/* Similarly for a REG.  */
3129
3130int
3131peep2_reg_dead_p (int ofs, rtx reg)
3132{
3133  int regno, n;
3134
3135  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3136
3137  ofs = peep2_buf_position (peep2_current + ofs);
3138
3139  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3140
3141  regno = REGNO (reg);
3142  n = hard_regno_nregs[regno][GET_MODE (reg)];
3143  while (--n >= 0)
3144    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3145      return 0;
3146  return 1;
3147}
3148
3149/* Regno offset to be used in the register search.  */
3150static int search_ofs;
3151
3152/* Try to find a hard register of mode MODE, matching the register class in
3153   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3154   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
3155   in which case the only condition is that the register must be available
3156   before CURRENT_INSN.
3157   Registers that already have bits set in REG_SET will not be considered.
3158
3159   If an appropriate register is available, it will be returned and the
3160   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3161   returned.  */
3162
3163rtx
3164peep2_find_free_register (int from, int to, const char *class_str,
3165			  machine_mode mode, HARD_REG_SET *reg_set)
3166{
3167  enum reg_class cl;
3168  HARD_REG_SET live;
3169  df_ref def;
3170  int i;
3171
3172  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3173  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3174
3175  from = peep2_buf_position (peep2_current + from);
3176  to = peep2_buf_position (peep2_current + to);
3177
3178  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3179  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3180
3181  while (from != to)
3182    {
3183      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3184
3185      /* Don't use registers set or clobbered by the insn.  */
3186      FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3187	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3188
3189      from = peep2_buf_position (from + 1);
3190    }
3191
3192  cl = reg_class_for_constraint (lookup_constraint (class_str));
3193
3194  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3195    {
3196      int raw_regno, regno, success, j;
3197
3198      /* Distribute the free registers as much as possible.  */
3199      raw_regno = search_ofs + i;
3200      if (raw_regno >= FIRST_PSEUDO_REGISTER)
3201	raw_regno -= FIRST_PSEUDO_REGISTER;
3202#ifdef REG_ALLOC_ORDER
3203      regno = reg_alloc_order[raw_regno];
3204#else
3205      regno = raw_regno;
3206#endif
3207
3208      /* Can it support the mode we need?  */
3209      if (! HARD_REGNO_MODE_OK (regno, mode))
3210	continue;
3211
3212      success = 1;
3213      for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3214	{
3215	  /* Don't allocate fixed registers.  */
3216	  if (fixed_regs[regno + j])
3217	    {
3218	      success = 0;
3219	      break;
3220	    }
3221	  /* Don't allocate global registers.  */
3222	  if (global_regs[regno + j])
3223	    {
3224	      success = 0;
3225	      break;
3226	    }
3227	  /* Make sure the register is of the right class.  */
3228	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3229	    {
3230	      success = 0;
3231	      break;
3232	    }
3233	  /* And that we don't create an extra save/restore.  */
3234	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3235	    {
3236	      success = 0;
3237	      break;
3238	    }
3239
3240	  if (! targetm.hard_regno_scratch_ok (regno + j))
3241	    {
3242	      success = 0;
3243	      break;
3244	    }
3245
3246	  /* And we don't clobber traceback for noreturn functions.  */
3247	  if ((regno + j == FRAME_POINTER_REGNUM
3248	       || regno + j == HARD_FRAME_POINTER_REGNUM)
3249	      && (! reload_completed || frame_pointer_needed))
3250	    {
3251	      success = 0;
3252	      break;
3253	    }
3254
3255	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3256	      || TEST_HARD_REG_BIT (live, regno + j))
3257	    {
3258	      success = 0;
3259	      break;
3260	    }
3261	}
3262
3263      if (success)
3264	{
3265	  add_to_hard_reg_set (reg_set, mode, regno);
3266
3267	  /* Start the next search with the next register.  */
3268	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3269	    raw_regno = 0;
3270	  search_ofs = raw_regno;
3271
3272	  return gen_rtx_REG (mode, regno);
3273	}
3274    }
3275
3276  search_ofs = 0;
3277  return NULL_RTX;
3278}
3279
3280/* Forget all currently tracked instructions, only remember current
3281   LIVE regset.  */
3282
3283static void
3284peep2_reinit_state (regset live)
3285{
3286  int i;
3287
3288  /* Indicate that all slots except the last holds invalid data.  */
3289  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3290    peep2_insn_data[i].insn = NULL_RTX;
3291  peep2_current_count = 0;
3292
3293  /* Indicate that the last slot contains live_after data.  */
3294  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3295  peep2_current = MAX_INSNS_PER_PEEP2;
3296
3297  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3298}
3299
3300/* While scanning basic block BB, we found a match of length MATCH_LEN,
3301   starting at INSN.  Perform the replacement, removing the old insns and
3302   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3303   if the replacement is rejected.  */
3304
3305static rtx_insn *
3306peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt)
3307{
3308  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3309  int i;
3310  rtx_insn *last, *before_try, *x;
3311  rtx eh_note, as_note;
3312  rtx_insn *old_insn;
3313  rtx_insn *new_insn;
3314  bool was_call = false;
3315
3316  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3317     match more than one insn, or to be split into more than one insn.  */
3318  old_insn = as_a <rtx_insn *> (peep2_insn_data[peep2_current].insn);
3319  if (RTX_FRAME_RELATED_P (old_insn))
3320    {
3321      bool any_note = false;
3322      rtx note;
3323
3324      if (match_len != 0)
3325	return NULL;
3326
3327      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
3328	 may be in the stream for the purpose of register allocation.  */
3329      if (active_insn_p (attempt))
3330	new_insn = attempt;
3331      else
3332	new_insn = next_active_insn (attempt);
3333      if (next_active_insn (new_insn))
3334	return NULL;
3335
3336      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3337      RTX_FRAME_RELATED_P (new_insn) = 1;
3338
3339      /* Allow the backend to fill in a note during the split.  */
3340      for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3341	switch (REG_NOTE_KIND (note))
3342	  {
3343	  case REG_FRAME_RELATED_EXPR:
3344	  case REG_CFA_DEF_CFA:
3345	  case REG_CFA_ADJUST_CFA:
3346	  case REG_CFA_OFFSET:
3347	  case REG_CFA_REGISTER:
3348	  case REG_CFA_EXPRESSION:
3349	  case REG_CFA_RESTORE:
3350	  case REG_CFA_SET_VDRAP:
3351	    any_note = true;
3352	    break;
3353	  default:
3354	    break;
3355	  }
3356
3357      /* If the backend didn't supply a note, copy one over.  */
3358      if (!any_note)
3359        for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3360	  switch (REG_NOTE_KIND (note))
3361	    {
3362	    case REG_FRAME_RELATED_EXPR:
3363	    case REG_CFA_DEF_CFA:
3364	    case REG_CFA_ADJUST_CFA:
3365	    case REG_CFA_OFFSET:
3366	    case REG_CFA_REGISTER:
3367	    case REG_CFA_EXPRESSION:
3368	    case REG_CFA_RESTORE:
3369	    case REG_CFA_SET_VDRAP:
3370	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3371	      any_note = true;
3372	      break;
3373	    default:
3374	      break;
3375	    }
3376
3377      /* If there still isn't a note, make sure the unwind info sees the
3378	 same expression as before the split.  */
3379      if (!any_note)
3380	{
3381	  rtx old_set, new_set;
3382
3383	  /* The old insn had better have been simple, or annotated.  */
3384	  old_set = single_set (old_insn);
3385	  gcc_assert (old_set != NULL);
3386
3387	  new_set = single_set (new_insn);
3388	  if (!new_set || !rtx_equal_p (new_set, old_set))
3389	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3390	}
3391
3392      /* Copy prologue/epilogue status.  This is required in order to keep
3393	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3394      maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3395    }
3396
3397  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3398     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3399     cfg-related call notes.  */
3400  for (i = 0; i <= match_len; ++i)
3401    {
3402      int j;
3403      rtx note;
3404
3405      j = peep2_buf_position (peep2_current + i);
3406      old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3407      if (!CALL_P (old_insn))
3408	continue;
3409      was_call = true;
3410
3411      new_insn = attempt;
3412      while (new_insn != NULL_RTX)
3413	{
3414	  if (CALL_P (new_insn))
3415	    break;
3416	  new_insn = NEXT_INSN (new_insn);
3417	}
3418
3419      gcc_assert (new_insn != NULL_RTX);
3420
3421      CALL_INSN_FUNCTION_USAGE (new_insn)
3422	= CALL_INSN_FUNCTION_USAGE (old_insn);
3423      SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3424
3425      for (note = REG_NOTES (old_insn);
3426	   note;
3427	   note = XEXP (note, 1))
3428	switch (REG_NOTE_KIND (note))
3429	  {
3430	  case REG_NORETURN:
3431	  case REG_SETJMP:
3432	  case REG_TM:
3433	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3434			  XEXP (note, 0));
3435	    break;
3436	  default:
3437	    /* Discard all other reg notes.  */
3438	    break;
3439	  }
3440
3441      /* Croak if there is another call in the sequence.  */
3442      while (++i <= match_len)
3443	{
3444	  j = peep2_buf_position (peep2_current + i);
3445	  old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3446	  gcc_assert (!CALL_P (old_insn));
3447	}
3448      break;
3449    }
3450
3451  /* If we matched any instruction that had a REG_ARGS_SIZE, then
3452     move those notes over to the new sequence.  */
3453  as_note = NULL;
3454  for (i = match_len; i >= 0; --i)
3455    {
3456      int j = peep2_buf_position (peep2_current + i);
3457      old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3458
3459      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3460      if (as_note)
3461	break;
3462    }
3463
3464  i = peep2_buf_position (peep2_current + match_len);
3465  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3466
3467  /* Replace the old sequence with the new.  */
3468  rtx_insn *peepinsn = as_a <rtx_insn *> (peep2_insn_data[i].insn);
3469  last = emit_insn_after_setloc (attempt,
3470				 peep2_insn_data[i].insn,
3471				 INSN_LOCATION (peepinsn));
3472  before_try = PREV_INSN (insn);
3473  delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3474
3475  /* Re-insert the EH_REGION notes.  */
3476  if (eh_note || (was_call && nonlocal_goto_handler_labels))
3477    {
3478      edge eh_edge;
3479      edge_iterator ei;
3480
3481      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3482	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3483	  break;
3484
3485      if (eh_note)
3486	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3487
3488      if (eh_edge)
3489	for (x = last; x != before_try; x = PREV_INSN (x))
3490	  if (x != BB_END (bb)
3491	      && (can_throw_internal (x)
3492		  || can_nonlocal_goto (x)))
3493	    {
3494	      edge nfte, nehe;
3495	      int flags;
3496
3497	      nfte = split_block (bb, x);
3498	      flags = (eh_edge->flags
3499		       & (EDGE_EH | EDGE_ABNORMAL));
3500	      if (CALL_P (x))
3501		flags |= EDGE_ABNORMAL_CALL;
3502	      nehe = make_edge (nfte->src, eh_edge->dest,
3503				flags);
3504
3505	      nehe->probability = eh_edge->probability;
3506	      nfte->probability
3507		= REG_BR_PROB_BASE - nehe->probability;
3508
3509	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3510	      bb = nfte->src;
3511	      eh_edge = nehe;
3512	    }
3513
3514      /* Converting possibly trapping insn to non-trapping is
3515	 possible.  Zap dummy outgoing edges.  */
3516      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3517    }
3518
3519  /* Re-insert the ARGS_SIZE notes.  */
3520  if (as_note)
3521    fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3522
3523  /* If we generated a jump instruction, it won't have
3524     JUMP_LABEL set.  Recompute after we're done.  */
3525  for (x = last; x != before_try; x = PREV_INSN (x))
3526    if (JUMP_P (x))
3527      {
3528	peep2_do_rebuild_jump_labels = true;
3529	break;
3530      }
3531
3532  return last;
3533}
3534
3535/* After performing a replacement in basic block BB, fix up the life
3536   information in our buffer.  LAST is the last of the insns that we
3537   emitted as a replacement.  PREV is the insn before the start of
3538   the replacement.  MATCH_LEN is the number of instructions that were
3539   matched, and which now need to be replaced in the buffer.  */
3540
3541static void
3542peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3543		   rtx_insn *prev)
3544{
3545  int i = peep2_buf_position (peep2_current + match_len + 1);
3546  rtx_insn *x;
3547  regset_head live;
3548
3549  INIT_REG_SET (&live);
3550  COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3551
3552  gcc_assert (peep2_current_count >= match_len + 1);
3553  peep2_current_count -= match_len + 1;
3554
3555  x = last;
3556  do
3557    {
3558      if (INSN_P (x))
3559	{
3560	  df_insn_rescan (x);
3561	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3562	    {
3563	      peep2_current_count++;
3564	      if (--i < 0)
3565		i = MAX_INSNS_PER_PEEP2;
3566	      peep2_insn_data[i].insn = x;
3567	      df_simulate_one_insn_backwards (bb, x, &live);
3568	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3569	    }
3570	}
3571      x = PREV_INSN (x);
3572    }
3573  while (x != prev);
3574  CLEAR_REG_SET (&live);
3575
3576  peep2_current = i;
3577}
3578
3579/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3580   Return true if we added it, false otherwise.  The caller will try to match
3581   peepholes against the buffer if we return false; otherwise it will try to
3582   add more instructions to the buffer.  */
3583
3584static bool
3585peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3586{
3587  int pos;
3588
3589  /* Once we have filled the maximum number of insns the buffer can hold,
3590     allow the caller to match the insns against peepholes.  We wait until
3591     the buffer is full in case the target has similar peepholes of different
3592     length; we always want to match the longest if possible.  */
3593  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3594    return false;
3595
3596  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3597     any other pattern, lest it change the semantics of the frame info.  */
3598  if (RTX_FRAME_RELATED_P (insn))
3599    {
3600      /* Let the buffer drain first.  */
3601      if (peep2_current_count > 0)
3602	return false;
3603      /* Now the insn will be the only thing in the buffer.  */
3604    }
3605
3606  pos = peep2_buf_position (peep2_current + peep2_current_count);
3607  peep2_insn_data[pos].insn = insn;
3608  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3609  peep2_current_count++;
3610
3611  df_simulate_one_insn_forwards (bb, as_a <rtx_insn *> (insn), live);
3612  return true;
3613}
3614
3615/* Perform the peephole2 optimization pass.  */
3616
3617static void
3618peephole2_optimize (void)
3619{
3620  rtx_insn *insn;
3621  bitmap live;
3622  int i;
3623  basic_block bb;
3624
3625  peep2_do_cleanup_cfg = false;
3626  peep2_do_rebuild_jump_labels = false;
3627
3628  df_set_flags (DF_LR_RUN_DCE);
3629  df_note_add_problem ();
3630  df_analyze ();
3631
3632  /* Initialize the regsets we're going to use.  */
3633  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3634    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3635  search_ofs = 0;
3636  live = BITMAP_ALLOC (&reg_obstack);
3637
3638  FOR_EACH_BB_REVERSE_FN (bb, cfun)
3639    {
3640      bool past_end = false;
3641      int pos;
3642
3643      rtl_profile_for_bb (bb);
3644
3645      /* Start up propagation.  */
3646      bitmap_copy (live, DF_LR_IN (bb));
3647      df_simulate_initialize_forwards (bb, live);
3648      peep2_reinit_state (live);
3649
3650      insn = BB_HEAD (bb);
3651      for (;;)
3652	{
3653	  rtx_insn *attempt;
3654	  rtx head;
3655	  int match_len;
3656
3657	  if (!past_end && !NONDEBUG_INSN_P (insn))
3658	    {
3659	    next_insn:
3660	      insn = NEXT_INSN (insn);
3661	      if (insn == NEXT_INSN (BB_END (bb)))
3662		past_end = true;
3663	      continue;
3664	    }
3665	  if (!past_end && peep2_fill_buffer (bb, insn, live))
3666	    goto next_insn;
3667
3668	  /* If we did not fill an empty buffer, it signals the end of the
3669	     block.  */
3670	  if (peep2_current_count == 0)
3671	    break;
3672
3673	  /* The buffer filled to the current maximum, so try to match.  */
3674
3675	  pos = peep2_buf_position (peep2_current + peep2_current_count);
3676	  peep2_insn_data[pos].insn = PEEP2_EOB;
3677	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3678
3679	  /* Match the peephole.  */
3680	  head = peep2_insn_data[peep2_current].insn;
3681	  attempt = safe_as_a <rtx_insn *> (
3682		      peephole2_insns (PATTERN (head), head, &match_len));
3683	  if (attempt != NULL)
3684	    {
3685	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3686	      if (last)
3687		{
3688		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3689		  continue;
3690		}
3691	    }
3692
3693	  /* No match: advance the buffer by one insn.  */
3694	  peep2_current = peep2_buf_position (peep2_current + 1);
3695	  peep2_current_count--;
3696	}
3697    }
3698
3699  default_rtl_profile ();
3700  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3701    BITMAP_FREE (peep2_insn_data[i].live_before);
3702  BITMAP_FREE (live);
3703  if (peep2_do_rebuild_jump_labels)
3704    rebuild_jump_labels (get_insns ());
3705  if (peep2_do_cleanup_cfg)
3706    cleanup_cfg (CLEANUP_CFG_CHANGED);
3707}
3708#endif /* HAVE_peephole2 */
3709
3710/* Common predicates for use with define_bypass.  */
3711
3712/* True if the dependency between OUT_INSN and IN_INSN is on the store
3713   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
3714   must be either a single_set or a PARALLEL with SETs inside.  */
3715
3716int
3717store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3718{
3719  rtx out_set, in_set;
3720  rtx out_pat, in_pat;
3721  rtx out_exp, in_exp;
3722  int i, j;
3723
3724  in_set = single_set (in_insn);
3725  if (in_set)
3726    {
3727      if (!MEM_P (SET_DEST (in_set)))
3728	return false;
3729
3730      out_set = single_set (out_insn);
3731      if (out_set)
3732        {
3733          if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3734            return false;
3735        }
3736      else
3737        {
3738          out_pat = PATTERN (out_insn);
3739
3740	  if (GET_CODE (out_pat) != PARALLEL)
3741	    return false;
3742
3743          for (i = 0; i < XVECLEN (out_pat, 0); i++)
3744          {
3745            out_exp = XVECEXP (out_pat, 0, i);
3746
3747            if (GET_CODE (out_exp) == CLOBBER)
3748              continue;
3749
3750            gcc_assert (GET_CODE (out_exp) == SET);
3751
3752            if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3753              return false;
3754          }
3755      }
3756    }
3757  else
3758    {
3759      in_pat = PATTERN (in_insn);
3760      gcc_assert (GET_CODE (in_pat) == PARALLEL);
3761
3762      for (i = 0; i < XVECLEN (in_pat, 0); i++)
3763	{
3764	  in_exp = XVECEXP (in_pat, 0, i);
3765
3766	  if (GET_CODE (in_exp) == CLOBBER)
3767	    continue;
3768
3769	  gcc_assert (GET_CODE (in_exp) == SET);
3770
3771	  if (!MEM_P (SET_DEST (in_exp)))
3772	    return false;
3773
3774          out_set = single_set (out_insn);
3775          if (out_set)
3776            {
3777              if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3778                return false;
3779            }
3780          else
3781            {
3782              out_pat = PATTERN (out_insn);
3783              gcc_assert (GET_CODE (out_pat) == PARALLEL);
3784
3785              for (j = 0; j < XVECLEN (out_pat, 0); j++)
3786                {
3787                  out_exp = XVECEXP (out_pat, 0, j);
3788
3789                  if (GET_CODE (out_exp) == CLOBBER)
3790                    continue;
3791
3792                  gcc_assert (GET_CODE (out_exp) == SET);
3793
3794                  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3795                    return false;
3796                }
3797            }
3798        }
3799    }
3800
3801  return true;
3802}
3803
3804/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3805   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
3806   or multiple set; IN_INSN should be single_set for truth, but for convenience
3807   of insn categorization may be any JUMP or CALL insn.  */
3808
3809int
3810if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3811{
3812  rtx out_set, in_set;
3813
3814  in_set = single_set (in_insn);
3815  if (! in_set)
3816    {
3817      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3818      return false;
3819    }
3820
3821  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3822    return false;
3823  in_set = SET_SRC (in_set);
3824
3825  out_set = single_set (out_insn);
3826  if (out_set)
3827    {
3828      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3829	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3830	return false;
3831    }
3832  else
3833    {
3834      rtx out_pat;
3835      int i;
3836
3837      out_pat = PATTERN (out_insn);
3838      gcc_assert (GET_CODE (out_pat) == PARALLEL);
3839
3840      for (i = 0; i < XVECLEN (out_pat, 0); i++)
3841	{
3842	  rtx exp = XVECEXP (out_pat, 0, i);
3843
3844	  if (GET_CODE (exp) == CLOBBER)
3845	    continue;
3846
3847	  gcc_assert (GET_CODE (exp) == SET);
3848
3849	  if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3850	      || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3851	    return false;
3852	}
3853    }
3854
3855  return true;
3856}
3857
3858static unsigned int
3859rest_of_handle_peephole2 (void)
3860{
3861#ifdef HAVE_peephole2
3862  peephole2_optimize ();
3863#endif
3864  return 0;
3865}
3866
3867namespace {
3868
3869const pass_data pass_data_peephole2 =
3870{
3871  RTL_PASS, /* type */
3872  "peephole2", /* name */
3873  OPTGROUP_NONE, /* optinfo_flags */
3874  TV_PEEPHOLE2, /* tv_id */
3875  0, /* properties_required */
3876  0, /* properties_provided */
3877  0, /* properties_destroyed */
3878  0, /* todo_flags_start */
3879  TODO_df_finish, /* todo_flags_finish */
3880};
3881
3882class pass_peephole2 : public rtl_opt_pass
3883{
3884public:
3885  pass_peephole2 (gcc::context *ctxt)
3886    : rtl_opt_pass (pass_data_peephole2, ctxt)
3887  {}
3888
3889  /* opt_pass methods: */
3890  /* The epiphany backend creates a second instance of this pass, so we need
3891     a clone method.  */
3892  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3893  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3894  virtual unsigned int execute (function *)
3895    {
3896      return rest_of_handle_peephole2 ();
3897    }
3898
3899}; // class pass_peephole2
3900
3901} // anon namespace
3902
3903rtl_opt_pass *
3904make_pass_peephole2 (gcc::context *ctxt)
3905{
3906  return new pass_peephole2 (ctxt);
3907}
3908
3909namespace {
3910
3911const pass_data pass_data_split_all_insns =
3912{
3913  RTL_PASS, /* type */
3914  "split1", /* name */
3915  OPTGROUP_NONE, /* optinfo_flags */
3916  TV_NONE, /* tv_id */
3917  0, /* properties_required */
3918  0, /* properties_provided */
3919  0, /* properties_destroyed */
3920  0, /* todo_flags_start */
3921  0, /* todo_flags_finish */
3922};
3923
3924class pass_split_all_insns : public rtl_opt_pass
3925{
3926public:
3927  pass_split_all_insns (gcc::context *ctxt)
3928    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3929  {}
3930
3931  /* opt_pass methods: */
3932  /* The epiphany backend creates a second instance of this pass, so
3933     we need a clone method.  */
3934  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3935  virtual unsigned int execute (function *)
3936    {
3937      split_all_insns ();
3938      return 0;
3939    }
3940
3941}; // class pass_split_all_insns
3942
3943} // anon namespace
3944
3945rtl_opt_pass *
3946make_pass_split_all_insns (gcc::context *ctxt)
3947{
3948  return new pass_split_all_insns (ctxt);
3949}
3950
3951static unsigned int
3952rest_of_handle_split_after_reload (void)
3953{
3954  /* If optimizing, then go ahead and split insns now.  */
3955#ifndef STACK_REGS
3956  if (optimize > 0)
3957#endif
3958    split_all_insns ();
3959  return 0;
3960}
3961
3962namespace {
3963
3964const pass_data pass_data_split_after_reload =
3965{
3966  RTL_PASS, /* type */
3967  "split2", /* name */
3968  OPTGROUP_NONE, /* optinfo_flags */
3969  TV_NONE, /* tv_id */
3970  0, /* properties_required */
3971  0, /* properties_provided */
3972  0, /* properties_destroyed */
3973  0, /* todo_flags_start */
3974  0, /* todo_flags_finish */
3975};
3976
3977class pass_split_after_reload : public rtl_opt_pass
3978{
3979public:
3980  pass_split_after_reload (gcc::context *ctxt)
3981    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3982  {}
3983
3984  /* opt_pass methods: */
3985  virtual unsigned int execute (function *)
3986    {
3987      return rest_of_handle_split_after_reload ();
3988    }
3989
3990}; // class pass_split_after_reload
3991
3992} // anon namespace
3993
3994rtl_opt_pass *
3995make_pass_split_after_reload (gcc::context *ctxt)
3996{
3997  return new pass_split_after_reload (ctxt);
3998}
3999
4000namespace {
4001
4002const pass_data pass_data_split_before_regstack =
4003{
4004  RTL_PASS, /* type */
4005  "split3", /* name */
4006  OPTGROUP_NONE, /* optinfo_flags */
4007  TV_NONE, /* tv_id */
4008  0, /* properties_required */
4009  0, /* properties_provided */
4010  0, /* properties_destroyed */
4011  0, /* todo_flags_start */
4012  0, /* todo_flags_finish */
4013};
4014
4015class pass_split_before_regstack : public rtl_opt_pass
4016{
4017public:
4018  pass_split_before_regstack (gcc::context *ctxt)
4019    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4020  {}
4021
4022  /* opt_pass methods: */
4023  virtual bool gate (function *);
4024  virtual unsigned int execute (function *)
4025    {
4026      split_all_insns ();
4027      return 0;
4028    }
4029
4030}; // class pass_split_before_regstack
4031
4032bool
4033pass_split_before_regstack::gate (function *)
4034{
4035#if HAVE_ATTR_length && defined (STACK_REGS)
4036  /* If flow2 creates new instructions which need splitting
4037     and scheduling after reload is not done, they might not be
4038     split until final which doesn't allow splitting
4039     if HAVE_ATTR_length.  */
4040# ifdef INSN_SCHEDULING
4041  return (optimize && !flag_schedule_insns_after_reload);
4042# else
4043  return (optimize);
4044# endif
4045#else
4046  return 0;
4047#endif
4048}
4049
4050} // anon namespace
4051
4052rtl_opt_pass *
4053make_pass_split_before_regstack (gcc::context *ctxt)
4054{
4055  return new pass_split_before_regstack (ctxt);
4056}
4057
4058static unsigned int
4059rest_of_handle_split_before_sched2 (void)
4060{
4061#ifdef INSN_SCHEDULING
4062  split_all_insns ();
4063#endif
4064  return 0;
4065}
4066
4067namespace {
4068
4069const pass_data pass_data_split_before_sched2 =
4070{
4071  RTL_PASS, /* type */
4072  "split4", /* name */
4073  OPTGROUP_NONE, /* optinfo_flags */
4074  TV_NONE, /* tv_id */
4075  0, /* properties_required */
4076  0, /* properties_provided */
4077  0, /* properties_destroyed */
4078  0, /* todo_flags_start */
4079  0, /* todo_flags_finish */
4080};
4081
4082class pass_split_before_sched2 : public rtl_opt_pass
4083{
4084public:
4085  pass_split_before_sched2 (gcc::context *ctxt)
4086    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4087  {}
4088
4089  /* opt_pass methods: */
4090  virtual bool gate (function *)
4091    {
4092#ifdef INSN_SCHEDULING
4093      return optimize > 0 && flag_schedule_insns_after_reload;
4094#else
4095      return false;
4096#endif
4097    }
4098
4099  virtual unsigned int execute (function *)
4100    {
4101      return rest_of_handle_split_before_sched2 ();
4102    }
4103
4104}; // class pass_split_before_sched2
4105
4106} // anon namespace
4107
4108rtl_opt_pass *
4109make_pass_split_before_sched2 (gcc::context *ctxt)
4110{
4111  return new pass_split_before_sched2 (ctxt);
4112}
4113
4114namespace {
4115
4116const pass_data pass_data_split_for_shorten_branches =
4117{
4118  RTL_PASS, /* type */
4119  "split5", /* name */
4120  OPTGROUP_NONE, /* optinfo_flags */
4121  TV_NONE, /* tv_id */
4122  0, /* properties_required */
4123  0, /* properties_provided */
4124  0, /* properties_destroyed */
4125  0, /* todo_flags_start */
4126  0, /* todo_flags_finish */
4127};
4128
4129class pass_split_for_shorten_branches : public rtl_opt_pass
4130{
4131public:
4132  pass_split_for_shorten_branches (gcc::context *ctxt)
4133    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4134  {}
4135
4136  /* opt_pass methods: */
4137  virtual bool gate (function *)
4138    {
4139      /* The placement of the splitting that we do for shorten_branches
4140	 depends on whether regstack is used by the target or not.  */
4141#if HAVE_ATTR_length && !defined (STACK_REGS)
4142      return true;
4143#else
4144      return false;
4145#endif
4146    }
4147
4148  virtual unsigned int execute (function *)
4149    {
4150      return split_all_insns_noflow ();
4151    }
4152
4153}; // class pass_split_for_shorten_branches
4154
4155} // anon namespace
4156
4157rtl_opt_pass *
4158make_pass_split_for_shorten_branches (gcc::context *ctxt)
4159{
4160  return new pass_split_for_shorten_branches (ctxt);
4161}
4162
4163/* (Re)initialize the target information after a change in target.  */
4164
4165void
4166recog_init ()
4167{
4168  /* The information is zero-initialized, so we don't need to do anything
4169     first time round.  */
4170  if (!this_target_recog->x_initialized)
4171    {
4172      this_target_recog->x_initialized = true;
4173      return;
4174    }
4175  memset (this_target_recog->x_bool_attr_masks, 0,
4176	  sizeof (this_target_recog->x_bool_attr_masks));
4177  for (int i = 0; i < LAST_INSN_CODE; ++i)
4178    if (this_target_recog->x_op_alt[i])
4179      {
4180	free (this_target_recog->x_op_alt[i]);
4181	this_target_recog->x_op_alt[i] = 0;
4182      }
4183}
4184