/* If-conversion support.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "regs.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "except.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "symtab.h"
#include "statistics.h"
#include "double-int.h"
#include "real.h"
#include "fixed-value.h"
#include "alias.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "output.h"
#include "insn-codes.h"
#include "optabs.h"
#include "diagnostic-core.h"
#include "tm_p.h"
#include "cfgloop.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
#include "shrink-wrap.h"
#include "ifcvt.h"

#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif

#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif
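
/* Illustrative arithmetic (assuming BRANCH_COST evaluates to 2 when
   optimizing for speed): the default above allows 2 + 1 = 3 insns to
   be converted per block; cond_exec_process_if_block doubles this
   limit when an ELSE block is present, i.e. 6 insns in total.  */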

#ifndef HAVE_cbranchcc4
#define HAVE_cbranchcc4 0
#endif

#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK	((basic_block) NULL)

/* True if after combine pass.  */
static bool ifcvt_after_combine;

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to
   conditional execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
				    int);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
			       edge, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx_insn *block_has_only_trap (basic_block);

/* Count the number of non-jump active insns in BB.  */

static int
count_bb_insns (const_basic_block bb)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);

  while (1)
    {
      if (active_insn_p (insn) && !JUMP_P (insn))
	count++;

      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return count;
}

/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   plus a small fudge factor.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);
  /* Set scale to REG_BR_PROB_BASE to cancel out the identical scaling
     applied to insn_rtx_cost when optimizing for size.  Only do
     this after combine because if-conversion might interfere with
     passes before combine.

     Use optimize_function_for_speed_p instead of the pre-defined
     variable speed to make sure it is set to the same value for all
     basic blocks in one if-conversion transformation.  */
  if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
    scale = REG_BR_PROB_BASE;
  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable when optimizing for performance.  */
  else
    scale += REG_BR_PROB_BASE / 8;

  max_cost *= scale;

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
	{
	  int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
	  if (cost == 0)
	    return false;

	  /* If this instruction is the load or set of a "stack" register,
	     such as a floating point register on x87, then the cost of
	     speculatively executing this insn may need to include
	     the additional cost of popping its result off of the
	     register stack.  Unfortunately, correctly recognizing and
	     accounting for this additional overhead is tricky, so for
	     now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
	  {
	    rtx set = single_set (insn);
	    if (set && STACK_REG_P (SET_DEST (set)))
	      return false;
	  }
#endif

	  count += cost;
	  if (count >= max_cost)
	    return false;
	}
      else if (CALL_P (insn))
	return false;

      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return true;
}
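
/* A worked example of the scaling above (assuming REG_BR_PROB_BASE is
   10000 and a caller passes SCALE == 2500 for a branch taken 25% of
   the time): the fudge factor raises the scale to 3750, an insn of
   cost 4 adds 4 * 10000 == 40000 to COUNT, and the block is judged
   cheap only while COUNT stays below MAX_COST * 3750.  */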

/* Return the first non-jump active insn in the basic block.  */

static rtx_insn *
first_active_insn (basic_block bb)
{
  rtx_insn *insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
	return NULL;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
    {
      if (insn == BB_END (bb))
	return NULL;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL;

  return insn;
}

/* Return the last active non-jump insn in the basic block.  */

static rtx_insn *
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx_insn *insn = BB_END (bb);
  rtx_insn *head = BB_HEAD (bb);

  while (NOTE_P (insn)
	 || JUMP_P (insn)
	 || DEBUG_INSN_P (insn)
	 || (skip_use_p
	     && NONJUMP_INSN_P (insn)
	     && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
	return NULL;
      insn = PREV_INSN (insn);
    }

  if (LABEL_P (insn))
    return NULL;

  return insn;
}

/* Return the active insn before INSN inside basic block CURR_BB. */

static rtx_insn *
find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_HEAD (curr_bb))
    return NULL;

  while ((insn = PREV_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the start of the basic block. */
      if (insn == BB_HEAD (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the active insn after INSN inside basic block CURR_BB. */

static rtx_insn *
find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_END (curr_bb))
    return NULL;

  while ((insn = NEXT_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the end of the basic block. */
      if (insn == BB_END (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the basic block reached by falling through the basic block BB.  */

static basic_block
block_fallthru (basic_block bb)
{
  edge e = find_fallthru_edge (bb->succs);

  return (e) ? e->dest : NULL_BLOCK;
}

/* Return true if RTXs A and B can be safely interchanged.  */

static bool
rtx_interchangeable_p (const_rtx a, const_rtx b)
{
  if (!rtx_equal_p (a, b))
    return false;

  if (GET_CODE (a) != MEM)
    return true;

  /* A dead type-unsafe memory reference is legal, but a live type-unsafe memory
     reference is not.  Interchanging a dead type-unsafe memory reference with
     a live type-safe one creates a live type-unsafe memory reference, in other
     words, it makes the program illegal.
     We check here conservatively whether the two memory references have equal
     memory attributes.  */

  return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
}

/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx_insn *start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */int prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx_insn *insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
	return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	goto insn_done;

      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* dwarf2out can't cope with conditional unwind info.  */
      if (RTX_FRAME_RELATED_P (insn))
	return FALSE;

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  SET_INSN_DELETED (insn);
	  goto insn_done;
	}

      /* Last insn wasn't last?  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (CALL_P (insn) && prob_val >= 0)
	validate_change (insn, &REG_NOTES (insn),
			 gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
					   prob_val, REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
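
/* For illustration (register numbers made up): a single insn

       (set (reg:SI 100) (reg:SI 101))

   predicated on the test (ne (reg:CC 24) (const_int 0)) becomes

       (cond_exec (ne (reg:CC 24) (const_int 0))
	 (set (reg:SI 100) (reg:SI 101)))

   while an insn that is already a COND_EXEC gets its existing test
   ANDed with the new one, as handled above.  */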

/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
cond_exec_get_condition (rtx_insn *jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (test_if, 2)) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
	return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			     XEXP (cond, 1));
    }

  return cond;
}
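
/* For example (a made-up jump pattern): given

       (set (pc) (if_then_else (eq (reg:CC 24) (const_int 0))
			       (pc)
			       (label_ref 23)))

   the branch is taken when the EQ condition is false, so the condition
   returned here is the reversed one, NE.  */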

/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx_insn *then_start;		/* first insn in THEN block */
  rtx_insn *then_end;		/* last insn + 1 in THEN block */
  rtx_insn *else_start = NULL;	/* first insn in ELSE block or NULL */
  rtx_insn *else_end = NULL;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  int true_prob_val;		/* probability of else block */
  int false_prob_val;		/* probability of then block */
  rtx_insn *then_last_head = NULL;	/* Last match at the head of THEN */
  rtx_insn *else_last_head = NULL;	/* Last match at the head of ELSE */
  rtx_insn *then_first_tail = NULL;	/* First match at the tail of THEN */
  rtx_insn *else_first_tail = NULL;	/* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;
  rtx note;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;
  /* If the conditional jump is more than just a conditional jump,
     then we cannot do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
	 and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
					 &then_first_tail, &else_first_tail,
					 NULL);
      if (then_first_tail == BB_HEAD (then_bb))
	then_start = then_end = NULL;
      if (else_first_tail == BB_HEAD (else_bb))
	else_start = else_end = NULL;

      if (n_matching > 0)
	{
	  if (then_end)
	    then_end = find_active_insn_before (then_bb, then_first_tail);
	  if (else_end)
	    else_end = find_active_insn_before (else_bb, else_first_tail);
	  n_insns -= 2 * n_matching;
	}

      if (then_start
	  && else_start
	  && then_n_insns > n_matching
	  && else_n_insns > n_matching)
	{
	  int longest_match = MIN (then_n_insns - n_matching,
				   else_n_insns - n_matching);
	  n_matching
	    = flow_find_head_matching_sequence (then_bb, else_bb,
						&then_last_head,
						&else_last_head,
						longest_match);

	  if (n_matching > 0)
	    {
	      rtx_insn *insn;

	      /* We won't pass the insns in the head sequence to
		 cond_exec_process_insns, so we need to test them here
		 to make sure that they don't clobber the condition.  */
	      for (insn = BB_HEAD (then_bb);
		   insn != NEXT_INSN (then_last_head);
		   insn = NEXT_INSN (insn))
		if (!LABEL_P (insn) && !NOTE_P (insn)
		    && !DEBUG_INSN_P (insn)
		    && modified_in_p (test_expr, insn))
		  return FALSE;
	    }

	  if (then_last_head == then_end)
	    then_start = then_end = NULL;
	  if (else_last_head == else_end)
	    else_start = else_end = NULL;

	  if (n_matching > 0)
	    {
	      if (then_start)
		then_start = find_active_insn_after (then_bb, then_last_head);
	      if (else_start)
		else_start = find_active_insn_after (else_bb, else_last_head);
	      n_insns -= 2 * n_matching;
	    }
	}
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note)
    {
      true_prob_val = XINT (note, 0);
      false_prob_val = REG_BR_PROB_BASE - true_prob_val;
    }
  else
    {
      true_prob_val = -1;
      false_prob_val = -1;
    }

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx_insn *start, *end;
	  rtx t, f;
	  enum rtx_code f_code;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we cannot do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f_code = reversed_comparison_code (t, BB_END (bb));
	  if (f_code == UNKNOWN)
	    goto fail;

	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx_insn *from = then_first_tail;
      if (!INSN_P (from))
	from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
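
/* As a concrete (hypothetical) example on a predicated target such as
   ARM, the transformation above turns

       if (x == 0)             cmp   r3, #0
	 a = b + 1;     =>     addeq r0, r1, #1
       else
	 a = b - 1;            subne r0, r1, #1

   so both arms become conditionally executed insns and no branch
   remains once the blocks are merged.  */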

/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx_insn *jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx_insn *cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insn in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx_insn *insn_a, *insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
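
/* As a rough (made-up) example, for a block "if (a > b) x = a; else x = b;"
   INSN_A is the THEN-block set "x = a" and INSN_B the ELSE-block set
   "x = b"; A and B are their SET_SRCs, X is the common SET_DEST, and
   COND is the comparison taken from the jump, here (gt a b) modulo the
   reversal described for THEN_ELSE_REVERSED.  */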

static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
			    rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);

/* Helper function for noce_try_store_flag*.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	  && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump))
	reversep = !reversep;
      if (if_info->then_else_reversed)
	reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
				XEXP (cond, 1));
      rtx set = gen_rtx_SET (VOIDmode, x, src);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
	{
	  rtx_insn *seq = get_insns ();
	  end_sequence ();
	  emit_insn (seq);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
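
/* E.g. (made-up RTL, assuming STORE_FLAG_VALUE == 1 and NORMALIZE == 0),
   when EARLIEST == JUMP or the condition is complex, storing the flag
   for the condition (lt (reg:SI 100) (const_int 0)) into X is first
   attempted as the bare insn

       (set X (lt:SI (reg:SI 100) (const_int 0)))

   with emit_store_flag used as the fallback path.  */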

/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx_insn *seq, *insn;
      rtx target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
	 otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
	     ? emit_move_insn (x, y)
	     : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      if (recog_memoized (insn) <= 0)
	{
	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      rtx op = XEXP (x, 0);
	      unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
	      unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

	      /* store_bit_field expects START to be relative to
		 BYTES_BIG_ENDIAN and adjusts this value for machines with
		 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
		 invoke store_bit_field again it is necessary to have the START
		 value from the first call.  */
	      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
		{
		  if (MEM_P (op))
		    start = BITS_PER_UNIT - start - size;
		  else
		    {
		      gcc_assert (REG_P (op));
		      start = BITS_PER_WORD - start - size;
		    }
		}

	      gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
	      store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
	      return;
	    }

	  switch (GET_RTX_CLASS (GET_CODE (y)))
	    {
	    case RTX_UNARY:
	      ot = code_to_optab (GET_CODE (y));
	      if (ot)
		{
		  start_sequence ();
		  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    case RTX_BIN_ARITH:
	    case RTX_COMM_ARITH:
	      ot = code_to_optab (GET_CODE (y));
	      if (ot)
		{
		  start_sequence ();
		  target = expand_binop (GET_MODE (y), ot,
					 XEXP (y, 0), XEXP (y, 1),
					 x, 0, OPTAB_DIRECT);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    default:
	      break;
	    }
	}

      emit_insn (seq);
      return;
    }

  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
		   0, 0, outmode, y);
}
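
/* E.g. (a made-up destination): when the plain SET is not recognized
   and X is

       (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 4))

   the store goes through store_bit_field with SIZE == 8 and START == 4,
   START being adjusted first on BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
   targets as described above.  */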

/* Return the CC reg if it is used in COND.  */

static rtx
cc_in_cond (rtx cond)
{
  if (HAVE_cbranchcc4 && cond
      && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_CC)
    return XEXP (cond, 0);

  return NULL_RTX;
}
/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence () to end the current stream, ensures
   that all instructions are unshared, and verifies that they are
   recognizable non-jump insns.  On failure, this function returns NULL.  */

static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx_insn *insn;
  rtx_insn *seq = get_insns ();
  rtx cc = cc_in_cond (if_info->cond);

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  set_used_flags (if_info->a);
  set_used_flags (if_info->b);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	|| recog_memoized (insn) == -1
	   /* Make sure new generated code does not clobber CC.  */
	|| (cc && set_of (cc, insn)))
      return NULL;

  return seq;
}

/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y;
  rtx_insn *seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (if_info->x)
      || HONOR_SIGNED_ZEROS (if_info->x))
    return FALSE;
  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      if (!rtx_interchangeable_p (if_info->a, if_info->b))
	return FALSE;

      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
	{
	  start_sequence ();
	  noce_emit_move_insn (if_info->x, y);
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	}
      return TRUE;
    }
  return FALSE;
}

/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
	   && CONST_INT_P (if_info->a)
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
	   && (reversed_comparison_code (if_info->cond, if_info->jump)
	       != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (! seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}

/* Convert "if (test) x = a; else x = b", for A and B constant.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
      /* Make sure we can represent the difference between the two values.  */
      if ((diff > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (diff, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || if_info->branch_cost >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || if_info->branch_cost >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
	normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
	       || if_info->branch_cost >= 3)
	normalize = -1;
      else
	return FALSE;

      if (reversep)
	{
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					gen_int_mode (ifalse, mode), target,
					if_info->x, 0, OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, gen_int_mode (ifalse, mode),
					if_info->x, 0, OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, gen_int_mode (diff, mode),
					if_info->x, 0, OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, gen_int_mode (ifalse, mode),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
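
/* A worked (made-up) instance of the general case above, assuming
   STORE_FLAG_VALUE == 1: for "if (test) x = 5; else x = 12" the emitted
   sequence is equivalent to

       t = -(test != 0);       t is 0 or -1
       t = t & (5 - 12);       t is 0 or -7
       x = t + 12;             x is 12 or 5

   replacing the branch with a few cheap ALU operations.  */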

/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int subtract, normalize;

  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (if_info->branch_cost >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
        {
	  start_sequence ();
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
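
/* For the setcc fallback above, e.g. (a made-up case assuming
   STORE_FLAG_VALUE == 1):

       if (test) foo += 1;   =>   t = (test != 0);  foo = foo + t;
       if (test) foo -= 1;   =>   t = (test != 0);  foo = foo - t;

   with a subtraction chosen when the addend is -STORE_FLAG_VALUE.  */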

/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;

  reversep = 0;
  if ((if_info->branch_cost >= 2
       || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  int old_cost, new_cost, insn_cost;
	  int speed_p;

	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  speed_p = optimize_bb_for_speed_p (BLOCK_FOR_INSN (if_info->insn_a));
	  insn_cost = insn_rtx_cost (PATTERN (if_info->insn_a), speed_p);
	  old_cost = COSTS_N_INSNS (if_info->branch_cost) + insn_cost;
	  new_cost = seq_cost (seq, speed_p);

	  if (new_cost > old_cost)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
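
/* E.g. (made up): "if (a == b) x = 0;" becomes

       t = -(a != b);          t is 0 when a == b, -1 otherwise
       x = x & t;

   which clears X exactly when the original condition held.  */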

/* Helper function for noce_try_cmove and noce_try_cmove_arith.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
					       cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (VOIDmode, x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
	{
	  rtx_insn *seq = get_insns ();
	  end_sequence ();
	  emit_insn (seq);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird, unless the
     target supports cbranchcc4.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    {
      if (!(HAVE_cbranchcc4)
	  || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
	  || cmp_b != const0_rtx)
	return NULL_RTX;
    }

#if HAVE_conditional_move
  unsignedp = (code == LTU || code == GEU
	       || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				  vtrue, vfalse, GET_MODE (x),
				  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
	  || byte_vtrue != byte_vfalse
	  || (SUBREG_PROMOTED_VAR_P (vtrue)
	      != SUBREG_PROMOTED_VAR_P (vfalse))
	  || (SUBREG_PROMOTED_GET (vtrue)
	      != SUBREG_PROMOTED_GET (vfalse)))
	return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
				      VOIDmode, reg_vtrue, reg_vfalse,
				      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
	return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}

/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
				XEXP (if_info->cond, 0),
				XEXP (if_info->cond, 1),
				if_info->a, if_info->b);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  return TRUE;
	}
      else
	{
	  end_sequence ();
	  return FALSE;
	}
    }

  return FALSE;
}

/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  rtx target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      machine_mode address_mode = get_address_mode (a);

      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
	= insn_rtx_cost (PATTERN (insn_a),
			 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
	return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
	+= insn_rtx_cost (PATTERN (insn_b),
			  optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  rtx tmp;
	  rtx_insn *tmp_insn;
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp_insn = insn_a, insn_a = insn_b, insn_b = tmp_insn;
	}
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx_insn *insn;

      if (is_mem)
	{
	  rtx reg = gen_reg_rtx (GET_MODE (a));
	  insn = emit_insn (gen_rtx_SET (VOIDmode, reg, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  a = gen_reg_rtx (GET_MODE (a));
	  rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
	  rtx set = single_set (copy_of_a);
	  SET_DEST (set) = a;
	  insn = emit_insn (PATTERN (copy_of_a));
	}
      if (recog_memoized (insn) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx pat;
      rtx_insn *last;
      rtx_insn *new_insn;

      if (is_mem)
	{
          rtx reg = gen_reg_rtx (GET_MODE (b));
	  pat = gen_rtx_SET (VOIDmode, reg, b);
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
          b = gen_reg_rtx (GET_MODE (b));
	  rtx_insn *copy_of_insn_b = as_a <rtx_insn *> (copy_rtx (insn_b));
	  rtx set = single_set (copy_of_insn_b);
	  SET_DEST (set) = b;
	  pat = PATTERN (copy_of_insn_b);
	}

      /* If insn to set up A clobbers any registers B depends on, try to
	 swap insn that sets up A with the one that sets up B.  If even
	 that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
	{
	  new_insn = emit_insn_before (pat, get_insns ());
	  if (modified_in_p (orig_a, new_insn))
	    goto end_seq_and_fail;
	}
      else
	new_insn = emit_insn (pat);

      if (recog_memoized (new_insn) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory, as above, emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq)
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
			   INSN_LOCATION (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}

/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx_insn **earliest)
{
  rtx cond, set;
  rtx_insn *insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
	  && BLOCK_FOR_INSN (prev_insn)
	     == BLOCK_FOR_INSN (if_info->cond_earliest)
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (CONST_INT_P (src))
	    {
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      if (CONST_INT_P (op_a))
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (CONST_INT_P (op_b))
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, HAVE_cbranchcc4, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}

/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, target;
  rtx_insn *earliest, *seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (if_info->x)
      || HONOR_NANS (if_info->x))
    return FALSE;

  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
	return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
	return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
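  /* An illustrative sketch (assumed source, not from this file): for
     "if (a < b) x = a; else x = b;" the branch in the canonical RTL is
     taken when the comparison fails, i.e. on a >= b, and it selects B.
     The condition seen here is therefore GE, which the table below maps
     to SMIN even though the source comparison was "less than".  */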
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
				if_info->a, if_info->b,
				if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}

/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
   "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
   etc.  */
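/* As an illustrative sketch (assumed source forms, not from this file):
   "if (a < 0) x = a; else x = -a;" carries the NEG on the ELSE arm, so
   A and B are swapped below and NEGATE is set, producing "x = -abs(a)";
   the plain "if (a < 0) x = -a; else x = a;" form needs no swap and
   yields "x = abs(a)".  */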

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, target, a, b, c;
  rtx_insn *earliest, *seq;
  int negate;
  bool one_cmpl = false;

  /* Reject modes with signed zeros.  */
  if (HONOR_SIGNED_ZEROS (if_info->x))
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  The canonical
     form is a branch around the negation, taken when the object is the
     first operand of a comparison against 0 that evaluates to true.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
  else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
    {
      negate = 0;
      one_cmpl = true;
    }
  else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
      one_cmpl = true;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    {
      c = XEXP (cond, 0);
      negate = !negate;
    }
  else
    return FALSE;

  /* Verify that C is zero.  Search one step backward for a
     REG_EQUAL note or a simple source if necessary.  */
  if (REG_P (c))
    {
      rtx set;
      rtx_insn *insn = prev_nonnote_insn (earliest);
      if (insn
	  && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
	  && (set = single_set (insn))
	  && rtx_equal_p (SET_DEST (set), c))
	{
	  rtx note = find_reg_equal_equiv_note (insn);
	  if (note)
	    c = XEXP (note, 0);
	  else
	    c = SET_SRC (set);
	}
      else
	return FALSE;
    }
  if (MEM_P (c)
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.
     The one_cmpl case is more complicated, as we want to handle
     only x < 0 ? ~x : x or x >= 0 ? x : ~x to one_cmpl_abs (x)
     and x < 0 ? x : ~x or x >= 0 ? ~x : x to ~one_cmpl_abs (x),
     but not other cases (x > -1 is equivalent to x >= 0).  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    {
      if (one_cmpl)
	return FALSE;
    }
  else if (c == CONST0_RTX (GET_MODE (b)))
    {
      if (one_cmpl
	  && GET_CODE (cond) != GE
	  && GET_CODE (cond) != LT)
	return FALSE;
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();
  if (one_cmpl)
    target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
					 if_info->x);
  else
    target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    {
      if (one_cmpl)
	target = expand_simple_unop (GET_MODE (target), NOT, target,
				     if_info->x, 0);
      else
	target = expand_simple_unop (GET_MODE (target), NEG, target,
				     if_info->x, 0);
    }

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}

/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".  */
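/* For illustration (a sketch, assuming a 32-bit M): the shift count C
   is the sign-bit position, so "(signed) m >> 31" is all ones when
   m < 0 and zero otherwise, and ANDing that mask with B yields B or 0
   as required.  The code below obtains the mask via emit_store_flag
   rather than emitting the shift explicitly.  */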

static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
  rtx cond, t, m, c;
  rtx_insn *seq;
  machine_mode mode;
  enum rtx_code code;
  bool t_unconditional;

  cond = if_info->cond;
  code = GET_CODE (cond);
  m = XEXP (cond, 0);
  c = XEXP (cond, 1);

  t = NULL_RTX;
  if (if_info->a == const0_rtx)
    {
      if ((code == LT && c == const0_rtx)
	  || (code == LE && c == constm1_rtx))
	t = if_info->b;
    }
  else if (if_info->b == const0_rtx)
    {
      if ((code == GE && c == const0_rtx)
	  || (code == GT && c == constm1_rtx))
	t = if_info->a;
    }

  if (! t || side_effects_p (t))
    return FALSE;

  /* We currently don't handle different modes.  */
  mode = GET_MODE (t);
  if (GET_MODE (m) != mode)
    return FALSE;

  /* This is only profitable if T is unconditionally executed/evaluated in the
     original insn sequence or T is cheap.  The former happens if B is the
     non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
     INSN_B which can happen for e.g. conditional stores to memory.  For the
     cost computation use the block TEST_BB where the evaluation will end up
     after the transformation.  */
  t_unconditional =
    (t == if_info->b
     && (if_info->insn_b == NULL_RTX
	 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
  if (!(t_unconditional
	|| (set_src_cost (t, optimize_bb_for_speed_p (if_info->test_bb))
	    < COSTS_N_INSNS (2))))
    return FALSE;

  start_sequence ();
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
     "(signed) m >> 31" directly.  This benefits targets with specialized
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
	: NULL_RTX;

  if (!t)
    {
      end_sequence ();
      return FALSE;
    }

  noce_emit_move_insn (if_info->x, t);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  return TRUE;
}


/* Optimize away "if (x & C) x |= C" and similar bit manipulation
   transformations.  */
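/* For example, "if ((x & 8) == 0) x ^= 8;" becomes the unconditional
   "x |= 8"; the condition is expected to arrive here as a single-bit
   ZERO_EXTRACT of X compared against zero.  */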

static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result;
  rtx_insn *seq;
  machine_mode mode;
  enum rtx_code code;
  int bitnum;

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for no else condition.  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      if (XEXP (cond, 1) != const1_rtx
	  || !CONST_INT_P (XEXP (cond, 2))
	  || ! rtx_equal_p (x, XEXP (cond, 0)))
	return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      mode = GET_MODE (x);
      if (BITS_BIG_ENDIAN)
	bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
	return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
	  || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (unsigned HOST_WIDE_INT) 1 << bitnum)
	return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
	result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
	{
	  /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
	  result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
	  result = simplify_gen_binary (IOR, mode, x, result);
	}
      else
	{
	  /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
	  result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
	  result = simplify_gen_binary (AND, mode, x, result);
	}
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
	  || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
	return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
    }
  return TRUE;
}


/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
   THEN block of the caller, and we have to reverse the condition.  */

static rtx
noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
{
  rtx cond, set, tmp;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));

  /* We may have to reverse because the caller's if block is not canonical,
     i.e. the THEN block isn't the fallthrough block for the TEST block
     (see find_if_header).  */
  if (then_else_reversed)
    reverse = !reverse;

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
      && (GET_MODE (tmp) != BImode
	  || !targetm.small_register_classes_for_mode_p (BImode)))
    {
      *earliest = jump;

      if (reverse)
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, earliest,
				NULL_RTX, HAVE_cbranchcc4, true);

  /* We don't handle side-effects in the condition, like handling
     REG_INC notes and making sure no duplicate conditions are emitted.  */
  if (tmp != NULL_RTX && side_effects_p (tmp))
    return NULL_RTX;

  return tmp;
}

/* Return true if OP is ok for if-then-else processing.  */

static int
noce_operand_ok (const_rtx op)
{
  if (side_effects_p (op))
    return FALSE;

  /* We special-case memories, so handle any of them with
     no address side effects.  */
  if (MEM_P (op))
    return ! side_effects_p (XEXP (op, 0));

  return ! may_trap_p (op);
}

/* Return true if a write into MEM may trap or fault.  */

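/* This augments may_trap_or_fault_p, which is consulted first below:
   the address walk additionally catches writes through addresses that
   resolve to read-only sections, e.g. a SYMBOL_REF whose decl lives in
   .rodata, which are valid to read but must not be stored to.  */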
static bool
noce_mem_write_may_trap_or_fault_p (const_rtx mem)
{
  rtx addr;

  if (MEM_READONLY_P (mem))
    return true;

  if (may_trap_or_fault_p (mem))
    return true;

  addr = XEXP (mem, 0);

  /* Call target hook to avoid the effects of -fpic etc....  */
  addr = targetm.delegitimize_address (addr);

  while (addr)
    switch (GET_CODE (addr))
      {
      case CONST:
      case PRE_DEC:
      case PRE_INC:
      case POST_DEC:
      case POST_INC:
      case POST_MODIFY:
	addr = XEXP (addr, 0);
	break;
      case LO_SUM:
      case PRE_MODIFY:
	addr = XEXP (addr, 1);
	break;
      case PLUS:
	if (CONST_INT_P (XEXP (addr, 1)))
	  addr = XEXP (addr, 0);
	else
	  return false;
	break;
      case LABEL_REF:
	return true;
      case SYMBOL_REF:
	if (SYMBOL_REF_DECL (addr)
	    && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
	  return true;
	return false;
      default:
	return false;
      }

  return false;
}

/* Return whether we can use store speculation for MEM.  TOP_BB is the
   basic block above the conditional block where we are considering
   doing the speculative store.  We look for whether MEM is set
   unconditionally later in the function.  */
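/* For instance (an illustrative sketch):

     if (c)
       *p = a;
     ...
     *p = b;

   the later unconditional store to *p makes the speculative store safe,
   provided nothing that might be a memory barrier (a volatile access or
   a non-const call) intervenes on the post-dominating path.  */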

static bool
noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
{
  basic_block dominator;

  for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
       dominator != NULL;
       dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
    {
      rtx_insn *insn;

      FOR_BB_INSNS (dominator, insn)
	{
	  /* If we see something that might be a memory barrier, we
	     have to stop looking.  Even if the MEM is set later in
	     the function, we still don't want to set it
	     unconditionally before the barrier.  */
	  if (INSN_P (insn)
	      && (volatile_insn_p (PATTERN (insn))
		  || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
	    return false;

	  if (memory_must_be_modified_in_insn_p (mem, insn))
	    return true;
	  if (modified_in_p (XEXP (mem, 0), insn))
	    return false;
	}
    }

  return false;
}

/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it without using conditional execution.  Return TRUE if we were successful
   at converting the block.  */

static int
noce_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;	/* test block */
  basic_block then_bb = if_info->then_bb;	/* THEN */
  basic_block else_bb = if_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = if_info->join_bb;	/* JOIN */
  rtx_insn *jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx_insn *insn_a, *insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;
  rtx cc;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The latter patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */
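  /* Note that in form (3) there is no ELSE assignment, so B is simply
     taken to be X itself below, giving the implicit "else x = x;".  */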

  /* Look for one of the potential sets.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_interchangeable_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
	  || !NONJUMP_INSN_P (insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_interchangeable_p (x, SET_DEST (set_b))
	  || ! noce_operand_ok (SET_SRC (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b), insn_b, jump)
	  /* Avoid extending the lifetime of hard registers on small
	     register class machines.  */
	  || (REG_P (SET_SRC (set_b))
	      && HARD_REGISTER_P (SET_SRC (set_b))
	      && targetm.small_register_classes_for_mode_p
		   (GET_MODE (SET_SRC (set_b))))
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, insn_b, jump))
	{
	  insn_b = NULL;
	  set_b = NULL_RTX;
	}
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at the end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (HARD_REGISTER_P (x)
	  && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
    {
      if (GET_MODE (x) == BLKmode)
	return FALSE;

      if (GET_CODE (x) == ZERO_EXTRACT
	  && (!CONST_INT_P (XEXP (x, 1))
	      || !CONST_INT_P (XEXP (x, 2))))
	return FALSE;

      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

 retry:
  /* Set up the info block for our subroutines.  */
  if_info->insn_a = insn_a;
  if_info->insn_b = insn_b;
  if_info->x = x;
  if_info->a = a;
  if_info->b = b;

  /* Skip it if the instruction to be moved might clobber CC.  */
  cc = cc_in_cond (cond);
  if (cc
      && (set_of (cc, insn_a)
	  || (insn_b && set_of (cc, insn_b))))
    return FALSE;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_interchangeable_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  if (!set_b && MEM_P (orig_x))
    {
      /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
	 for optimizations if writing to x may trap or fault,
	 i.e. it's a memory other than a static var or a stack slot,
	 is misaligned on strict-alignment machines or is read-only.  If
	 x is a read-only memory, then the program is valid only if we
	 avoid the store into it.  If there are stores on both the
	 THEN and ELSE arms, then we can go ahead with the conversion;
	 either the program is broken, or the condition is always
	 false such that the other memory is selected.  */
      if (noce_mem_write_may_trap_or_fault_p (orig_x))
	return FALSE;

      /* Avoid store speculation: given "if (...) x = a" where x is a
	 MEM, we only want to do the store if x is always set
	 somewhere in the function.  This avoids cases like
	   if (pthread_mutex_trylock(mutex))
	     ++global_variable;
	 where we only want global_variable to be changed if the mutex
	 is held.  FIXME: This should ideally be expressed directly in
	 RTL somehow.  */
      if (!noce_can_store_speculate_p (test_bb, orig_x))
	return FALSE;
    }

  if (noce_try_move (if_info))
    goto success;
  if (noce_try_store_flag (if_info))
    goto success;
  if (noce_try_bitop (if_info))
    goto success;
  if (noce_try_minmax (if_info))
    goto success;
  if (noce_try_abs (if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (if_info))
    goto success;
  if (! targetm.have_conditional_execution ())
    {
      if (noce_try_store_flag_constants (if_info))
	goto success;
      if (noce_try_addcc (if_info))
	goto success;
      if (noce_try_store_flag_mask (if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (if_info))
	goto success;
      if (noce_try_sign_mask (if_info))
	goto success;
    }

  if (!else_bb && set_b)
    {
      insn_b = NULL;
      set_b = NULL_RTX;
      b = orig_x;
      goto retry;
    }

  return FALSE;

 success:

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      rtx_insn *seq;

      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      seq = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (seq);
      end_sequence ();

      emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
    }

  /* The original THEN and ELSE blocks may now be removed.  The test block
     must now jump to the join block.  If the test block and the join block
     can be merged, do so.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}

/* Check whether a block is suitable for conditional move conversion.
   Every insn must be a simple set of a register to a constant or a
   register.  For each assignment, store the value in the pointer map
   VALS, keyed by register pointer, then store the register pointer
   in REGS.  COND is the condition we will test.  */
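/* For example (illustrative only), a block consisting of

     x = 5;
     y = a;

   is suitable, whereas one containing "x = *p" is not: a memory source
   is neither a constant nor a register operand, and may also trap.  */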

static int
check_cond_move_block (basic_block bb,
		       hash_map<rtx, rtx> *vals,
		       vec<rtx> *regs,
		       rtx cond)
{
  rtx_insn *insn;
  rtx cc = cc_in_cond (cond);

  /* We can only handle simple jumps at the end of the basic block.
     It is almost impossible to update the CFG otherwise.  */
  insn = BB_END (bb);
  if (JUMP_P (insn) && !onlyjump_p (insn))
    return FALSE;

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, dest, src;

      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      if (!set)
	return FALSE;

      dest = SET_DEST (set);
      src = SET_SRC (set);
      if (!REG_P (dest)
	  || (HARD_REGISTER_P (dest)
	      && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
	return FALSE;

      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
	return FALSE;

      if (side_effects_p (src) || side_effects_p (dest))
	return FALSE;

      if (may_trap_p (src) || may_trap_p (dest))
	return FALSE;

      /* Don't try to handle this if the source register was
	 modified earlier in the block.  */
      if ((REG_P (src)
	   && vals->get (src))
	  || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
	      && vals->get (SUBREG_REG (src))))
	return FALSE;

      /* Don't try to handle this if the destination register was
	 modified earlier in the block.  */
      if (vals->get (dest))
	return FALSE;

      /* Don't try to handle this if the condition uses the
	 destination register.  */
      if (reg_overlap_mentioned_p (dest, cond))
	return FALSE;

      /* Don't try to handle this if the source register is modified
	 later in the block.  */
      if (!CONSTANT_P (src)
	  && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
	return FALSE;

      /* Skip it if the instruction to be moved might clobber CC.  */
      if (cc && set_of (cc, insn))
	return FALSE;

      vals->put (dest, src);

      regs->safe_push (dest);
    }

  return TRUE;
}

/* Given a basic block BB suitable for conditional move conversion,
   a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
   the register values depending on COND, emit the insns in the block as
   conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
   processed.  The caller has started a sequence for the conversion.
   Return true if successful, false if something goes wrong.  */
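/* When a register is set on only one arm, the missing arm's value is
   the register itself, e.g. (a sketch) a lone "if (c) x = a;" becomes
   the equivalent of "x = c ? a : x" via noce_emit_cmove below.  */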

static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
			    basic_block bb, rtx cond,
			    hash_map<rtx, rtx> *then_vals,
			    hash_map<rtx, rtx> *else_vals,
			    bool else_block_p)
{
  enum rtx_code code;
  rtx_insn *insn;
  rtx cond_arg0, cond_arg1;

  code = GET_CODE (cond);
  cond_arg0 = XEXP (cond, 0);
  cond_arg1 = XEXP (cond, 1);

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, target, dest, t, e;

      /* ??? Maybe emit conditional debug insn?  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      gcc_assert (set && REG_P (SET_DEST (set)));

      dest = SET_DEST (set);

      rtx *then_slot = then_vals->get (dest);
      rtx *else_slot = else_vals->get (dest);
      t = then_slot ? *then_slot : NULL_RTX;
      e = else_slot ? *else_slot : NULL_RTX;

      if (else_block_p)
	{
	  /* If this register was set in the then block, we already
	     handled this case there.  */
	  if (t)
	    continue;
	  t = dest;
	  gcc_assert (e);
	}
      else
	{
	  gcc_assert (t);
	  if (!e)
	    e = dest;
	}

      target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
				t, e);
      if (!target)
	return false;

      if (target != dest)
	noce_emit_move_insn (dest, target);
    }

  return true;
}

/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it using only conditional moves.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_move_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;
  basic_block then_bb = if_info->then_bb;
  basic_block else_bb = if_info->else_bb;
  basic_block join_bb = if_info->join_bb;
  rtx_insn *jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx_insn *seq, *loc_insn;
  rtx reg;
  int c;
  vec<rtx> then_regs = vNULL;
  vec<rtx> else_regs = vNULL;
  unsigned int i;
  int success_p = FALSE;

  /* Build a mapping for each block to the value used for each
     register.  */
  hash_map<rtx, rtx> then_vals;
  hash_map<rtx, rtx> else_vals;

  /* Make sure the blocks are suitable.  */
  if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
      || (else_bb
	  && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
    goto done;

  /* Make sure the blocks can be used together.  If the same register
     is set in both blocks, and is not set to a constant in both
     cases, then both blocks must set it to the same register.  We
     have already verified that if it is set to a register, that the
     source register does not change after the assignment.  Also count
     the number of registers set in only one of the blocks.  */
  c = 0;
  FOR_EACH_VEC_ELT (then_regs, i, reg)
    {
      rtx *then_slot = then_vals.get (reg);
      rtx *else_slot = else_vals.get (reg);

      gcc_checking_assert (then_slot);
      if (!else_slot)
	++c;
      else
	{
	  rtx then_val = *then_slot;
	  rtx else_val = *else_slot;
	  if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
	      && !rtx_equal_p (then_val, else_val))
	    goto done;
	}
    }

  /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
  FOR_EACH_VEC_ELT (else_regs, i, reg)
    {
      gcc_checking_assert (else_vals.get (reg));
      if (!then_vals.get (reg))
	++c;
    }

  /* Make sure it is reasonable to convert this block.  What matters
     is the number of assignments currently made in only one of the
     branches, since if we convert we are going to always execute
     them.  */
  if (c > MAX_CONDITIONAL_EXECUTE)
    goto done;

  /* Try to emit the conditional moves.  First do the then block,
     then do anything left in the else blocks.  */
  start_sequence ();
  if (!cond_move_convert_if_block (if_info, then_bb, cond,
				   &then_vals, &else_vals, false)
      || (else_bb
	  && !cond_move_convert_if_block (if_info, else_bb, cond,
					  &then_vals, &else_vals, true)))
    {
      end_sequence ();
      goto done;
    }
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    goto done;

  loc_insn = first_active_insn (then_bb);
  if (!loc_insn)
    {
      loc_insn = first_active_insn (else_bb);
      gcc_assert (loc_insn);
    }
  emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));

  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;

  success_p = TRUE;

done:
  then_regs.release ();
  else_regs.release ();
  return success_p;
}


/* Determine if a given basic block heads a simple IF-THEN-JOIN or an
   IF-THEN-ELSE-JOIN block.

   If so, we'll try to convert the insns to not require the branch,
   using only transformations that do not require conditional execution.

   Return TRUE if we were successful at converting the block.  */

static int
noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
		    int pass)
{
  basic_block then_bb, else_bb, join_bb;
  bool then_else_reversed = false;
  rtx_insn *jump;
  rtx cond;
  rtx_insn *cond_earliest;
  struct noce_if_info if_info;

  /* We only ever should get here before reload.  */
  gcc_assert (!reload_completed);

  /* Recognize an IF-THEN-ELSE-JOIN block.  */
  if (single_pred_p (then_edge->dest)
      && single_succ_p (then_edge->dest)
      && single_pred_p (else_edge->dest)
      && single_succ_p (else_edge->dest)
      && single_succ (then_edge->dest) == single_succ (else_edge->dest))
    {
      then_bb = then_edge->dest;
      else_bb = else_edge->dest;
      join_bb = single_succ (then_bb);
    }
  /* Recognize an IF-THEN-JOIN block.  */
  else if (single_pred_p (then_edge->dest)
	   && single_succ_p (then_edge->dest)
	   && single_succ (then_edge->dest) == else_edge->dest)
    {
      then_bb = then_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = else_edge->dest;
    }
  /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
     of basic blocks in cfglayout mode does not matter, so the fallthrough
     edge can go to any basic block (and not just to bb->next_bb, like in
     cfgrtl mode).  */
  else if (single_pred_p (else_edge->dest)
	   && single_succ_p (else_edge->dest)
	   && single_succ (else_edge->dest) == then_edge->dest)
    {
      /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
	 To make this work, we have to invert the THEN and ELSE blocks
	 and reverse the jump condition.  */
      then_bb = else_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = single_succ (then_bb);
      then_else_reversed = true;
    }
  else
    /* Not a form we can handle.  */
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
    return FALSE;
  if (else_bb
      && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
	       (else_bb) ? "-ELSE" : "",
	       pass, test_bb->index, then_bb->index);

      if (else_bb)
	fprintf (dump_file, ", else %d", else_bb->index);

      fprintf (dump_file, ", join %d\n", join_bb->index);
    }

  /* If the conditional jump is more than just a conditional
     jump, then we cannot do if-conversion on this block.  */
  jump = BB_END (test_bb);
  if (! onlyjump_p (jump))
    return FALSE;

  /* If this is not a standard conditional jump, we can't parse it.  */
  cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
  if (!cond)
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Initialize an IF_INFO struct to pass around.  */
  memset (&if_info, 0, sizeof if_info);
  if_info.test_bb = test_bb;
  if_info.then_bb = then_bb;
  if_info.else_bb = else_bb;
  if_info.join_bb = join_bb;
  if_info.cond = cond;
  if_info.cond_earliest = cond_earliest;
  if_info.jump = jump;
  if_info.then_else_reversed = then_else_reversed;
  if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
				     predictable_edge_p (then_edge));

  /* Do the real work.  */

  if (noce_process_if_block (&if_info))
    return TRUE;

  if (HAVE_conditional_move
      && cond_move_process_if_block (&if_info))
    return TRUE;

  return FALSE;
}


/* Merge the blocks and mark for local life update.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;
  df_set_bb_dirty (test_bb);

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      /* If THEN_BB has no successors, then there's a BARRIER after it.
	 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
	 is no longer needed, and in fact it is incorrect to leave it in
	 the insn stream.  */
      if (EDGE_COUNT (then_bb->succs) == 0
	  && EDGE_COUNT (combo_bb->succs) > 1)
	{
	  rtx_insn *end = NEXT_INSN (BB_END (then_bb));
	  while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
	    end = NEXT_INSN (end);

	  if (end && BARRIER_P (end))
	    delete_insn (end);
	}
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      /* If ELSE_BB has no successors, then there's a BARRIER after it.
	 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
	 is no longer needed, and in fact it is incorrect to leave it in
	 the insn stream.  */
      if (EDGE_COUNT (else_bb->succs) == 0
	  && EDGE_COUNT (combo_bb->succs) > 1)
	{
	  rtx_insn *end = NEXT_INSN (BB_END (else_bb));
	  while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
	    end = NEXT_INSN (end);

	  if (end && BARRIER_P (end))
	    delete_insn (end);
	}
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx_insn *last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
		    || (NONJUMP_INSN_P (last)
			&& GET_CODE (PATTERN (last)) == TRAP_IF
			&& (TRAP_CONDITION (PATTERN (last))
			    == const_true_rtx)));
      else
	/* There should still be something at the end of the THEN or ELSE
	   blocks taking us to our final destination.  */
	gcc_assert (JUMP_P (last)
		    || (EDGE_SUCC (combo_bb, 0)->dest
			== EXIT_BLOCK_PTR_FOR_FN (cfun)
			&& CALL_P (last)
			&& SIBLING_CALL_P (last))
		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
			&& can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
	   && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* We can merge the JOIN cleanly and update the dataflow, and
	 try again on this pass.  */
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
		  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}

/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.  */

static basic_block
find_if_header (basic_block test_bb, int pass)
{
  ce_if_block ce_info;
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
  if (EDGE_COUNT (test_bb->succs) != 2)
    return NULL;

  then_edge = EDGE_SUCC (test_bb, 0);
  else_edge = EDGE_SUCC (test_bb, 1);

  if (df_get_bb_dirty (then_edge->dest))
    return NULL;
  if (df_get_bb_dirty (else_edge->dest))
    return NULL;

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
    return NULL;

  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
    return NULL;

  memset (&ce_info, 0, sizeof (ce_info));
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;

#ifdef IFCVT_MACHDEP_INIT
  IFCVT_MACHDEP_INIT (&ce_info);
#endif

  if (!reload_completed
      && noce_find_if_block (test_bb, then_edge, else_edge, pass))
    goto success;

  if (reload_completed
      && targetm.have_conditional_execution ()
      && cond_exec_find_if_block (&ce_info))
    goto success;

  if (HAVE_trap
      && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;

  if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
      && (reload_completed || !targetm.have_conditional_execution ()))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
	goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
	goto success;
    }

  return NULL;

 success:
  if (dump_file)
    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
  /* Set this so we continue looking.  */
  cond_exec_changed_p = TRUE;
  return ce_info.test_bb;
}

/* Determine whether CUR_BB has two edges, one falling through to the next
   block and the other jumping to TARGET_BB, so that we can tell whether the
   block is part of an && test or an || test.  Return the number of non-note,
   non-jump, non-USE/CLOBBER insns in the block, or -1 if it is not such a
   block.  */
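/* E.g. (an illustrative sketch) for "if (a && b) ...", the block testing
   A falls through to the block testing B and jumps to the ELSE
   destination when A is false, so an && block both jumps to TARGET_BB
   and falls through, which is the shape recognized here.  */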

static int
block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
{
  edge cur_edge;
  int fallthru_p = FALSE;
  int jump_p = FALSE;
  rtx_insn *insn;
  rtx_insn *end;
  int n_insns = 0;
  edge_iterator ei;

  if (!cur_bb || !target_bb)
    return -1;

  /* If there are no edges, it obviously doesn't jump or fall through,
     so this is not a block we can use.  */
  if (EDGE_COUNT (cur_bb->succs) == 0)
    return -1;

  FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	/* Anything complex isn't what we want.  */
	return -1;

      else if (cur_edge->flags & EDGE_FALLTHRU)
	fallthru_p = TRUE;

      else if (cur_edge->dest == target_bb)
	jump_p = TRUE;

      else
	return -1;
    }

  if ((jump_p & fallthru_p) == 0)
    return -1;

  /* Don't allow calls in the block, since this is used to group && and ||
     together for conditional execution support.  ??? We should support
     conditional execution across calls for IA-64 some day, but for now it
     makes the code simpler.  */
3531  end = BB_END (cur_bb);
3532  insn = BB_HEAD (cur_bb);
3533
3534  while (insn != NULL_RTX)
3535    {
3536      if (CALL_P (insn))
3537	return -1;
3538
3539      if (INSN_P (insn)
3540	  && !JUMP_P (insn)
3541	  && !DEBUG_INSN_P (insn)
3542	  && GET_CODE (PATTERN (insn)) != USE
3543	  && GET_CODE (PATTERN (insn)) != CLOBBER)
3544	n_insns++;
3545
3546      if (insn == end)
3547	break;
3548
3549      insn = NEXT_INSN (insn);
3550    }
3551
3552  return n_insns;
3553}
3554
3555/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3556   block.  If so, we'll try to convert the insns to not require the branch.
3557   Return TRUE if we were successful at converting the block.  */

static int
cond_exec_find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* We should only ever get here after reload,
     and only if the target has conditional execution.  */
  gcc_assert (reload_completed && targetm.have_conditional_execution ());

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      if (!single_pred_p (bb))
		break;

	      bb = single_pred (bb);
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* Neither the THEN block nor the ELSE block may have any complex
     incoming edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
	  || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
	  || (epilogue_completed
	      && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check whether the last insn of the THEN block is an indirect jump,
     which is listed as not having any successors, but which confuses the
     rest of the CE code processing.  ??? We should fix this in the
     future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *last_insn = BB_END (then_bb);

	  while (last_insn
		 && NOTE_P (last_insn)
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  if (last_insn
	      && JUMP_P (last_insn)
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
	   && single_succ (then_bb) == single_succ (else_bb)
	   && single_pred_p (else_bb)
	   && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
	   && !(epilogue_completed
		&& tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s block found, pass %d, start block %d "
	       "[insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index,
	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index,
	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (dump_file, ", else %d [%d]",
		 else_bb->index,
		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
	       join_bb->index,
	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure the IF, THEN, and ELSE blocks are adjacent.  Actually, we get
     the first condition for free, since we've already asserted that there's
     a fallthru edge from IF to THEN.  Likewise for the && and || blocks,
     since we checked the FALLTHRU flag, those are already adjacent to the
     last IF block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb
      && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */

  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  /* If we have && and || tests, try to first handle combining the && and ||
     tests into the conditional code, and if that fails, go back and handle
     it without the && and ||, which at present handles the && case if there
     was no ELSE block.  */
  if (cond_exec_process_if_block (ce_info, TRUE))
    return TRUE;

  if (ce_info->num_multiple_test_blocks)
    {
      cancel_changes (0);

      if (cond_exec_process_if_block (ce_info, FALSE))
	return TRUE;
    }

  return FALSE;
}

/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.  */
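
/* For example (a hypothetical source fragment; the resulting insn depends
   on the target's conditional-trap pattern):

     if (p == NULL)
       __builtin_trap ();
     *p = 1;

   can become a single conditional trap insn guarding the store, removing
   the branch entirely.  */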

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx_insn *trap, *jump;
  rtx cond, seq;
  rtx_insn *cond_earliest;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, false);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we cannot do if-conversion on this block.  Also give up if the jump
     satisfies returnjump_p: changing a conditional return followed by an
     unconditional trap into a conditional trap followed by an
     unconditional return is unlikely to be beneficial and is harder to
     handle.  */
  if (! onlyjump_p (jump) || returnjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
		       copy_rtx (XEXP (cond, 1)),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else if (trap_bb == then_bb)
    {
      rtx lab;
      rtx_insn *newjump;

      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}

/* Subroutine of find_cond_trap: if BB contains only a trap insn,
   return it.  */
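
/* E.g. a block whose only active insn is the unconditional trap

     (trap_if (const_int 1) (const_int 0))

   left behind by a __builtin_trap () call qualifies: the TRAP_IF
   condition is const_true_rtx and the block has no successors.  (The
   trap code 0 here is purely illustrative; it is target-dependent.)  */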

static rtx_insn *
block_has_only_trap (basic_block bb)
{
  rtx_insn *trap;

  /* We're not the exit block.  */
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* The block must have no successors.  */
  if (EDGE_COUNT (bb->succs) > 0)
    return NULL;

  /* The only active instruction in the block must be the trap.  */
  trap = first_active_insn (bb);
  if (! (trap == BB_END (bb)
	 && GET_CODE (PATTERN (trap)) == TRAP_IF
	 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
    return NULL;

  return trap;
}

/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.

   Cases we'd like to look at:

   (1)
	if (test) goto over; // x not live
	x = a;
	goto label;
	over:

   becomes

	x = a;
	if (! test) goto label;

   (2)
	if (test) goto E; // x not live
	x = big();
	goto L;
	E:
	x = b;
	goto M;

   becomes

	x = b;
	if (test) goto M;
	x = big();
	goto L;

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

	if (test1) goto E; // predicted not taken
	x = a;
	if (test2) goto F;
	...
	E:
	x = b;
	J:

   becomes

	x = a;
	if (test1) goto E;
	if (test2) goto F;

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

	if (test1) x = a;
	if (!test1) x = b;
	if (test1) goto J;
	if (test2) goto F;
	...
	J:

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */

/* Tests for case 1 above.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index, then_prob;
  rtx else_target = NULL_RTX;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  if (then_edge->probability)
    then_prob = REG_BR_PROB_BASE - then_edge->probability;
  else
    then_prob = REG_BR_PROB_BASE / 2;

  /* We're speculating from the THEN path, so we want to make sure the
     cost of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
    return FALSE;

  if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      rtx_insn *jump = BB_END (else_edge->src);
      gcc_assert (JUMP_P (jump));
      else_target = JUMP_LABEL (jump);
    }

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ_edge (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     through to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
					     else_bb, else_target);
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* This should have been done above via force_nonfallthru_and_redirect
	 (possibly called from redirect_edge_and_branch_force).  */
      gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}

/* Test for case 2 above.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  int then_prob, else_prob;

  /* We do not want to speculate (empty) loop latches.  */
  if (current_loops
      && else_bb->loop_father->latch == else_bb)
    return FALSE;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  if (else_edge->probability)
    {
      else_prob = else_edge->probability;
      then_prob = REG_BR_PROB_BASE - else_prob;
    }
  else
    {
      else_prob = REG_BR_PROB_BASE / 2;
      then_prob = REG_BR_PROB_BASE / 2;
    }

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path, so we want to make sure the
     cost of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}

/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  OTHER_BB is
   TEST_BB's other successor, on the opposite side of the branch from
   MERGE_BB.  DEST_EDGE is an edge representing a jump to the join
   block; after the conversion, TEST_BB should be branching to its
   destination.  REVERSEP is true if the sense of the branch should
   be reversed.  */
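
/* A sketch of the two strategies (illustrative, not an exact dump):
   under conditional execution, cond_exec_process_insns rewrites each
   insn of MERGE_BB into predicated form, e.g.

     (set (reg x) (reg a))

   becomes

     (cond_exec (ne (reg cc) (const_int 0)) (set (reg x) (reg a)))

   whereas without conditional execution the insns must instead be shown
   safe to execute unconditionally -- no traps, no calls, no memory
   references, and no sets of registers live at the branch -- and are
   then hoisted above the jump.  */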

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx_insn *head, *end, *jump;
  rtx_insn *earliest = NULL;
  rtx old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insns
     into test_bb and then deleting merge_bb will remove the jumptable
     that follows merge_bb along with merge_bb itself, leaving an
     unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (!onlyjump_p (end))
	return FALSE;
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
    }

  /* Don't move frame-related insns across the conditional branch.  This
     can lead to one of the paths of the branch having wrong unwind info.  */
  if (epilogue_completed)
    {
      rtx_insn *insn = head;
      while (1)
	{
	  if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
	    return FALSE;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      int prob_val = (note ? XINT (note, 0) : -1);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  if (prob_val >= 0)
	    prob_val = REG_BR_PROB_BASE - prob_val;
	}

      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the non-conditional execution (NCE) path if the conditional
     execution (CE) path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond;
      rtx_insn *insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
	return FALSE;

      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);

      /* If shrink-wrapping, disable this optimization when test_bb is
	 the first basic block and merge_bb exits.  The idea is to not
	 move code setting up a return register as that may clobber a
	 register used to pass function parameters, which then must be
	 saved in caller-saved regs.  A caller-saved reg requires the
	 prologue, killing a shrink-wrap opportunity.  */
      if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
	  && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
	  && single_succ_p (new_dest)
	  && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
	{
	  regset return_regs;
	  unsigned int i;

	  return_regs = BITMAP_ALLOC (&reg_obstack);

	  /* Start off with the intersection of regs used to pass
	     params and regs used to return values.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (FUNCTION_ARG_REGNO_P (i)
		&& targetm.calls.function_value_regno_p (i))
	      bitmap_set_bit (return_regs, INCOMING_REGNO (i));

	  bitmap_and_into (return_regs,
			   df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  bitmap_and_into (return_regs,
			   df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
	  if (!bitmap_empty_p (return_regs))
	    {
	      FOR_BB_INSNS_REVERSE (new_dest, insn)
		if (NONDEBUG_INSN_P (insn))
		  {
		    df_ref def;

		    /* If this insn sets any reg in return_regs, add all
		       reg uses to the set of regs we're interested in.  */
		    FOR_EACH_INSN_DEF (def, insn)
		      if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
			{
			  df_simulate_uses (insn, return_regs);
			  break;
			}
		  }
	      if (bitmap_intersect_p (merge_set, return_regs))
		{
		  BITMAP_FREE (return_regs);
		  BITMAP_FREE (merge_set);
		  return FALSE;
		}
	    }
	  BITMAP_FREE (return_regs);
	}
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want delete_insn to be called.  Also, we want to do our
     own change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      if (!any_condjump_p (jump))
	goto cancel;

      if (JUMP_P (BB_END (dest_edge->src)))
	new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	new_dest_label = ret_rtx;
      else
	new_dest_label = block_label (new_dest);

      if (reversep
	  ? ! invert_jump_1 (jump, new_dest_label)
	  : ! redirect_jump_1 (jump, new_dest_label))
	goto cancel;
    }

  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_dest_label, 0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx_insn *insn;

      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
      insn = head;
      do
	{
	  rtx note;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  remove_note (insn, note);
	} while (insn != end && (insn = NEXT_INSN (insn)));

      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}

/* Main entry point for all if-conversion.  AFTER_COMBINE is true if
   we are running after the combine pass.  */

static void
if_convert (bool after_combine)
{
  basic_block bb;
  int pass;

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  /* Record whether we are running after the combine pass.  */
  ifcvt_after_combine = after_combine;
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* We only need to run DCE on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB_FN (bb, cfun)
	{
	  basic_block new_bb;
	  while (!df_get_bb_dirty (bb)
		 && (new_bb = find_if_header (bb, pass)) != NULL)
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
	print_rtl_with_bb (dump_file, get_insns (), dump_flags);
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* If-conversion and CFG cleanup.  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
	{
	  dump_reg_info (dump_file);
	  dump_flow_info (dump_file, dump_flags);
	}
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert (false);
    }

  cleanup_cfg (0);
  return 0;
}

namespace {

const pass_data pass_data_rtl_ifcvt =
{
  RTL_PASS, /* type */
  "ce1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_ifcvt : public rtl_opt_pass
{
public:
  pass_rtl_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize > 0) && dbg_cnt (if_conversion);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_if_conversion ();
    }

}; // class pass_rtl_ifcvt

} // anon namespace

rtl_opt_pass *
make_pass_rtl_ifcvt (gcc::context *ctxt)
{
  return new pass_rtl_ifcvt (ctxt);
}


/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */

namespace {

const pass_data pass_data_if_after_combine =
{
  RTL_PASS, /* type */
  "ce2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_combine : public rtl_opt_pass
{
public:
  pass_if_after_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_combine, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion
	&& dbg_cnt (if_after_combine);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_combine

} // anon namespace

rtl_opt_pass *
make_pass_if_after_combine (gcc::context *ctxt)
{
  return new pass_if_after_combine (ctxt);
}


namespace {

const pass_data pass_data_if_after_reload =
{
  RTL_PASS, /* type */
  "ce3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_if_after_reload : public rtl_opt_pass
{
public:
  pass_if_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion2
	&& dbg_cnt (if_after_reload);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_if_after_reload (gcc::context *ctxt)
{
  return new pass_if_after_reload (ctxt);
}
