/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file is a pathetic remnant of the formerly famous jump-optimization
   pass of the compiler.  It now contains basically a set of utility
   functions for operating on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "symtab.h"
#include "statistics.h"
#include "double-int.h"
#include "real.h"
#include "fixed-value.h"
#include "alias.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "except.h"
#include "diagnostic-core.h"
#include "reload.h"
#include "tree-pass.h"
#include "target.h"
#include "rtl-iter.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx_insn *);
static void mark_all_labels (rtx_insn *);
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);

/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
  rtx_insn_list *insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    for (insn = forced_labels; insn; insn = insn->next ())
      if (LABEL_P (insn->insn ()))
	LABEL_NUSES (insn->insn ())++;
  timevar_pop (TV_REBUILD_JUMP);
}

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx_insn *f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain.  */
void
rebuild_jump_labels_chain (rtx_insn *chain)
{
  rebuild_jump_labels_1 (chain, false);
}

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
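/* For instance, an insn chain

     insn  ->  NOTE  ->  BARRIER

   is reordered into

     insn  ->  BARRIER  ->  NOTE

   and a BARRIER that directly follows another BARRIER is deleted.  */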
static unsigned int
cleanup_barriers (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
	{
	  rtx_insn *prev = prev_nonnote_insn (insn);
	  if (!prev)
	    continue;

	  if (CALL_P (prev))
	    {
	      /* Make sure we do not split a call and its corresponding
		 CALL_ARG_LOCATION note.  */
	      rtx_insn *next = NEXT_INSN (prev);

	      if (NOTE_P (next)
		  && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		prev = next;
	    }

	  if (BARRIER_P (prev))
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    {
	      basic_block bb = BLOCK_FOR_INSN (prev);
	      rtx_insn *end = PREV_INSN (insn);
	      reorder_insns_nobb (insn, insn, prev);
	      if (bb)
		{
		  /* If the backend called compute_bb_for_insn in machine
		     reorg and didn't call free_bb_for_insn again, preserve
		     basic block boundaries.  Move the end of the basic
		     block to PREV since it is followed by a barrier now,
		     and clear BLOCK_FOR_INSN on the following notes.
		     ???  Maybe the proper solution for the targets that
		     have a cfg around after machine reorg is not to run
		     the cleanup_barriers pass at all.  */
		  BB_END (bb) = prev;
		  do
		    {
		      prev = NEXT_INSN (prev);
		      if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
			BLOCK_FOR_INSN (prev) = NULL;
		    }
		  while (prev != end);
		}
	    }
	}
    }
  return 0;
}

namespace {

const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace

rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}


/* Initialize LABEL_NUSES and JUMP_LABEL fields, and add REG_LABEL_TARGET
   notes for the remaining targets of JUMP_P insns.  Delete any
   REG_LABEL_OPERAND notes whose labels no longer occur in the insn.  */

static void
init_label_info (rtx_insn *f)
{
  rtx_insn *insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
	 sticky and not reset here; that way we won't lose association
	 with a label when e.g. the source for a target register
	 disappears out of reach for targets that may use jump-target
	 registers.  Jump transformations are supposed to transform
	 any REG_LABEL_TARGET notes.  The target label reference in a
	 branch may disappear from the branch (and from the
	 instruction before it) for other reasons, like register
	 allocation.  */

      if (INSN_P (insn))
	{
	  rtx note, next;

	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
    }
}

/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */
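/* For instance, given a non-jump insn
     (set (reg R) (label_ref L))   ;; carrying a REG_LABEL_OPERAND note for L
   directly followed by
     (set (pc) (reg R))
   the jump's JUMP_LABEL becomes L.  */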

static void
maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0)
		      == LABEL_REF_LABEL (SET_SRC (label_set)));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx_insn *f)
{
  rtx_insn *insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! insn->deleted ());
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	}
    }
  else
    {
      rtx_insn *prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (insn->deleted ())
	    ;
	  else if (LABEL_P (insn))
	    prev_nonjump_insn = NULL;
	  else if (JUMP_TABLE_DATA_P (insn))
	    mark_jump_label (PATTERN (insn), insn, 0);
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
}

/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return the
   code of the reversed comparison if it is possible to do so.  Otherwise
   return UNKNOWN.  UNKNOWN may be returned when we have a CC_MODE
   comparison and we don't know whether its source is a floating point or
   an integer comparison.  The machine description should define the
   REVERSIBLE_CC_MODE and REVERSE_CONDITION macros to help this function
   avoid overhead in these cases.  */
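/* For example, reversing an integer (gt x y) yields LE, but for IEEE
   floats the reversed code must be UNLE, since !(x > y) also holds
   when x and y are unordered (NaN).  */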
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const_rtx insn)
{
  machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to
     reverse the comparison.  Give it priority over everything else to
     allow the machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#else
      return reverse_condition (code);
#endif
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
	 point.  Similarly, the unsigned comparisons are never used for
	 floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure we
	 are dealing with floating point, so we don't need any more
	 tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (CONST_CAST_RTX (prev)))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
	         models like the i387, whose comparisons first move the
	         flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we do not
	     understand, give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

/* A wrapper around the previous function that takes COMPARISON as an
   rtx expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const_rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}

/* Return a comparison with the reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */
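/* E.g. (lt x y) and (gt y x) test the same condition, so
   swap_condition (LT) is GT; this holds even for NaN operands.  */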

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}

/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */
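/* E.g. the truth of EQ implies both GE and LE, so
   comparison_dominates_p (EQ, GE) is 1, while the converse
   comparison_dominates_p (GE, EQ) is 0.  */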

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to
     reverse comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}

/* Return 1 if INSN is an unconditional jump and nothing else.  */
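/* That is, its pattern must be exactly
     (set (pc) (label_ref LABEL))
   with no PARALLEL wrapper around it.  */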

int
simplejump_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
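/* For example, both arm orderings
     (set (pc) (if_then_else (COND) (label_ref L) (pc)))
     (set (pc) (if_then_else (COND) (pc) (label_ref L)))
   satisfy condjump_p, as does a plain (set (pc) (label_ref L)).  */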

int
condjump_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || ANY_RETURN_P (XEXP (x, 1))))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| ANY_RETURN_P (XEXP (x, 2))))));
}

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
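/* Only the first element of the PARALLEL is examined, so e.g.
     (parallel [(set (pc) (if_then_else ...)) (clobber (reg CC))])
   qualifies regardless of what the remaining elements are.  */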

int
condjump_in_parallel_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}

/* Return the SET that sets PC, otherwise NULL.  */

rtx
pc_set (const rtx_insn *insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
	  || (a == PC
	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const rtx_insn *insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return TRUE if INSN is a return jump.  */

int
returnjump_p (const rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	{
	  const_rtx x = *iter;
	  switch (GET_CODE (x))
	    {
	    case RETURN:
	    case SIMPLE_RETURN:
	    case EH_RETURN:
	      return true;

	    case SET:
	      if (SET_IS_RETURN_P (x))
		return true;
	      break;

	    default:
	      break;
	    }
	}
    }
  return false;
}

/* Return true if INSN is a jump that contains an EH_RETURN rtx.  */

int
eh_returnjump_p (rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	if (GET_CODE (*iter) == EH_RETURN)
	  return true;
    }
  return false;
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const rtx_insn *insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */
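/* E.g. (set (cc0) (compare x y)) yields 1, while a PARALLEL that sets
   cc0 but also contains some other SET yields -1.  */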

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif

/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   For returnjumps, the JUMP_LABEL will also be set as appropriate.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */

void
mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
{
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}

/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */
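/* For example, for a JUMP_INSN with pattern (set (pc) (label_ref L)),
   L is a jump target and ends up in JUMP_LABEL, whereas a (label_ref L)
   in the pattern of an ordinary INSN only earns L a REG_LABEL_OPERAND
   note.  */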

static void
mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	for (i = 0; i < seq->len (); i++)
	  mark_jump_label (PATTERN (seq->insn (i)),
			   seq->insn (i), 0);
      }
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx label = LABEL_REF_LABEL (x);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	LABEL_REF_LABEL (x) = label;
	if (! insn || ! insn->deleted ())
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! insn->deleted ())
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx_insn *insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}

/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx_insn *
delete_related_insns (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);

  while (next && next->deleted ())
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (insn->deleted ())
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If this is a call, then we have to remove the var tracking note
     for the call arguments.  */

  if (CALL_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
    {
      rtx_insn *p;

      for (p = next && next->deleted () ? NEXT_INSN (next) : next;
	   p && NOTE_P (p);
	   p = NEXT_INSN (p))
	if (NOTE_KIND (p) == NOTE_INSN_CALL_ARG_LOCATION)
	  {
	    remove_insn (p);
	    break;
	  }
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn);
      rtx_jump_table_data *lab_next;

      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec labels = table->get_labels ();
      int i;
      int len = GET_NUM_ELEM (labels);

      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
	  delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
      while (next && next->deleted ())
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  while (prev && (prev->deleted () || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && next->deleted ())
	    next = NEXT_INSN (next);
	  /* Keep the (use (insn))s created by dbr_schedule, which needs
	     them in order to track liveness relative to a previous
	     barrier.  */
	  else if (INSN_P (next)
		   && GET_CODE (PATTERN (next)) == USE
		   && INSN_P (XEXP (PATTERN (next), 0)))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn = from;

  while (1)
    {
      rtx_insn *next = NEXT_INSN (insn);
      rtx_insn *prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  insn->set_deleted ();

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    SET_NEXT_INSN (prev) = next;

	  if (next)
	    SET_PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  if (!ANY_RETURN_P (x))
    return gen_rtx_LABEL_REF (Pmode, x);
  return x;
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if ((code == LABEL_REF && LABEL_REF_LABEL (x) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && LABEL_REF_LABEL (SET_SRC (x)) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not any label comparisons that may
         appear in the condition.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */
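/* For instance, redirecting
     (set (pc) (if_then_else (COND) (label_ref OLD) (pc)))
   to NEW queues a change of the THEN arm to (label_ref NEW).  */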

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
	return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
	 Before the epilogue is emitted, return/simple_return cannot be
	 created, so we return 0 immediately.  After the epilogue is
	 emitted, we always expect a label, either a non-null label or a
	 return/simple_return RTX.  */

      if (!epilogue_completed)
	return 0;
      gcc_unreachable ();
    }

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced
   with NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insns related to OLABEL if
   its reference count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to signal behavior when moving the
     FUNCTION_END note.  Just sanity-check that no user still worries
     about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way is to reverse
	 the comparison code, which reversed_comparison_code may refuse
	 to do for e.g. a NaN-sensitive floating-point comparison.
	 Otherwise, swap the THEN-part and ELSE-part of the
	 IF_THEN_ELSE.  */
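      /* E.g. (if_then_else (gt x y) (label_ref L) (pc)) becomes either
	 (if_then_else (le x y) (label_ref L) (pc)) or, if GT cannot be
	 reversed safely, (if_then_else (gt x y) (pc) (label_ref L)).  */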

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}


/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx_insn *jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use
     is in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}


/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */
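/* E.g. (plus (reg 65) (reg 66)) matches (plus (reg 66) (reg 65)), and
   (reg 65) matches (reg 66) when reg_renumber maps both pseudos to the
   same hard register.  */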

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (LABEL_REF_LABEL (x))
	      == next_real_insn (LABEL_REF_LABEL (y)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTXes match if the operands match
     in either order.  Also handle the simple binary and unary cases
     without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  subreg_get_info (lra_in_progress
			   ? (unsigned) base : REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
    }
  return -1;
}

/* Return the regno of the register REG, handling SUBREGs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}
